diff --git a/ISTA-DASLab/results_2024-05-14-21-00-56.json b/ISTA-DASLab/results_2024-05-14-21-00-56.json new file mode 100644 index 0000000000000000000000000000000000000000..615d3a380ab6c91d7b5b05be2c37af260495630c --- /dev/null +++ b/ISTA-DASLab/results_2024-05-14-21-00-56.json @@ -0,0 +1,651 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-14-21-00-56", + "total_evaluation_time_secondes": "", + "model_name": "ISTA-DASLab/Mistral-7B-Instruct-v0.2-AQLM-2Bit-2x8", + "model_sha": "", + "model_dtype": "2bit", + "model_size": 2.27, + "model_params": 7, + "quant_type": "AQLM", + "precision": "2bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.6408839779005525, + "acc_stderr,none": 0.013483115202120241, + "alias": "winogrande" + }, + "harness|hellaswag|0": { + "acc,none": 0.5616411073491336, + "acc_stderr,none": 0.004951717622007965, + "acc_norm,none": 0.7385978888667596, + "acc_norm_stderr,none": 0.00438500499892336, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.264, + "acc_stderr,none": 0.019732885585922098, + "acc_norm,none": 0.376, + "acc_norm_stderr,none": 0.021683827539286115, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.4638922888616891, + "acc_stderr,none": 0.017457800422268625, + "alias": "truthfulqa_mc1" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6316639380666816, + "acc_stderr,none": 0.015414325790023395, + "alias": "truthfulqa_mc2" + }, + "harness|mmlu|0": { + "acc,none": 0.4016521862982481, + "acc_stderr,none": 0.004063454732088467, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.3742826780021254, + "acc_stderr,none": 0.006918946610656743 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.23809523809523808, + "acc_stderr,none": 0.03809523809523809 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.503030303030303, + "acc_stderr,none": 0.03904272341431857 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.49019607843137253, + "acc_stderr,none": 0.03508637358630572 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.5485232067510548, + "acc_stderr,none": 0.0323936001739747 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.5950413223140496, + "acc_stderr,none": 0.04481137755942469 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.4444444444444444, + "acc_stderr,none": 0.04803752235190193 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.44785276073619634, + "acc_stderr,none": 0.03906947479456602 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.4479768786127168, + "acc_stderr,none": 0.02677299065336182 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574884 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.42765273311897106, + "acc_stderr,none": 0.028099240775809563 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.3888888888888889, + 
"acc_stderr,none": 0.027125115513166854 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.33833116036505867, + "acc_stderr,none": 0.0120842656263442 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.4619883040935672, + "acc_stderr,none": 0.03823727092882307 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.4280656581911812, + "acc_stderr,none": 0.00874063365417062 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.4075471698113208, + "acc_stderr,none": 0.030242233800854498 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.35260115606936415, + "acc_stderr,none": 0.03643037168958548 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.42152466367713004, + "acc_stderr,none": 0.033141902221106564 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.4854368932038835, + "acc_stderr,none": 0.049486373240266356 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.6239316239316239, + "acc_stderr,none": 0.031733936329694824 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.51213282247765, + "acc_stderr,none": 0.017874698667491345 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.434640522875817, + "acc_stderr,none": 0.028384256704883044 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.32978723404255317, + "acc_stderr,none": 0.0280459469420424 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.34191176470588236, + "acc_stderr,none": 0.02881472242225418 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.3192771084337349, + "acc_stderr,none": 0.036293353299478595 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.46246343841403964, + "acc_stderr,none": 0.008844738621946012 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2719298245614035, + "acc_stderr,none": 0.04185774424022056 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.5202020202020202, + "acc_stderr,none": 0.03559443565563919 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.5544041450777202, + "acc_stderr,none": 0.03587014986075659 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.36923076923076925, + "acc_stderr,none": 0.024468615241478916 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.41596638655462187, + "acc_stderr,none": 0.03201650100739615 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.44954128440366975, + "acc_stderr,none": 0.02132788141782337 + }, + 
"harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.3969465648854962, + "acc_stderr,none": 0.04291135671009225 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.4035947712418301, + "acc_stderr,none": 0.019848280168401164 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5818181818181818, + "acc_stderr,none": 0.047245774057315705 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.5591836734693878, + "acc_stderr,none": 0.03178419114175363 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.6268656716417911, + "acc_stderr,none": 0.03419832608176008 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.35712020298128766, + "acc_stderr,none": 0.008497330653183912 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.45185185185185184, + "acc_stderr,none": 0.04299268905480864 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.39473684210526316, + "acc_stderr,none": 0.039777499346220734 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.375, + "acc_stderr,none": 0.04048439222695598 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.23529411764705882, + "acc_stderr,none": 0.04220773659171451 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.47, + "acc_stderr,none": 0.050161355804659205 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3446808510638298, + "acc_stderr,none": 0.03106898596312215 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.3931034482758621, + "acc_stderr,none": 0.040703290137070705 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.35714285714285715, + "acc_stderr,none": 0.024677862841332783 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.4161290322580645, + "acc_stderr,none": 0.028040981380761543 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.3251231527093596, + "acc_stderr,none": 0.032957975663112704 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3, + "acc_stderr,none": 0.02794045713622841 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 
0.3509933774834437, + "acc_stderr,none": 0.03896981964257375 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.3101851851851852, + "acc_stderr,none": 0.03154696285656628 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.30357142857142855, + "acc_stderr,none": 0.04364226155841044 + }, + "harness|arc:easy|0": { + "acc,none": 0.7529461279461279, + "acc_stderr,none": 0.00885005516145924, + "acc_norm,none": 0.7066498316498316, + "acc_norm_stderr,none": 0.009342508331708563, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.7825688073394496, + "acc_stderr,none": 0.007214641080602781, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 6.578457255981092, + "perplexity_stderr,none": 0.19631273674006466, + "acc,none": 0.6089656510770425, + "acc_stderr,none": 0.006798544197091019, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.45563139931740615, + "acc_stderr,none": 0.014553749939306864, + "acc_norm,none": 0.48976109215017066, + "acc_norm_stderr,none": 0.014608326906285015, + "alias": "arc_challenge" + }, + "harness|piqa|0": { + "acc,none": 0.7742110990206746, + "acc_stderr,none": 0.00975498067091733, + "acc_norm,none": 0.7758433079434167, + "acc_norm_stderr,none": 0.009729897956410027, + "alias": "piqa" + } + }, + "task_info": { + "model": "ISTA-DASLab/Mistral-7B-Instruct-v0.2-AQLM-2Bit-2x8", + "revision": "main", + "private": false, + "params": 7, + "architectures": "MistralForCausalLM", + "quant_type": "AQLM", + "precision": "2bit", + "model_params": 7, + "model_size": 2.27, + "weight_dtype": "int2", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-13T11:54:45Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "in_group_size": 8, + "linear_weights_not_to_quantize": [ + "model.layers.0.input_layernorm.weight", + "model.layers.0.post_attention_layernorm.weight", + "model.layers.1.input_layernorm.weight", + "model.layers.1.post_attention_layernorm.weight", + "model.layers.2.input_layernorm.weight", + "model.layers.2.post_attention_layernorm.weight", + "model.layers.3.input_layernorm.weight", + "model.layers.3.post_attention_layernorm.weight", + "model.layers.4.input_layernorm.weight", + "model.layers.4.post_attention_layernorm.weight", + "model.layers.5.input_layernorm.weight", + "model.layers.5.post_attention_layernorm.weight", + "model.layers.6.input_layernorm.weight", + "model.layers.6.post_attention_layernorm.weight", + "model.layers.7.input_layernorm.weight", + "model.layers.7.post_attention_layernorm.weight", + "model.layers.8.input_layernorm.weight", + "model.layers.8.post_attention_layernorm.weight", + "model.layers.9.input_layernorm.weight", + "model.layers.9.post_attention_layernorm.weight", + "model.layers.10.input_layernorm.weight", + "model.layers.10.post_attention_layernorm.weight", + "model.layers.11.input_layernorm.weight", + "model.layers.11.post_attention_layernorm.weight", + "model.layers.12.input_layernorm.weight", + "model.layers.12.post_attention_layernorm.weight", + "model.layers.13.input_layernorm.weight", + "model.layers.13.post_attention_layernorm.weight", + "model.layers.14.input_layernorm.weight", + "model.layers.14.post_attention_layernorm.weight", + "model.layers.15.input_layernorm.weight", + 
"model.layers.15.post_attention_layernorm.weight", + "model.layers.16.input_layernorm.weight", + "model.layers.16.post_attention_layernorm.weight", + "model.layers.17.input_layernorm.weight", + "model.layers.17.post_attention_layernorm.weight", + "model.layers.18.input_layernorm.weight", + "model.layers.18.post_attention_layernorm.weight", + "model.layers.19.input_layernorm.weight", + "model.layers.19.post_attention_layernorm.weight", + "model.layers.20.input_layernorm.weight", + "model.layers.20.post_attention_layernorm.weight", + "model.layers.21.input_layernorm.weight", + "model.layers.21.post_attention_layernorm.weight", + "model.layers.22.input_layernorm.weight", + "model.layers.22.post_attention_layernorm.weight", + "model.layers.23.input_layernorm.weight", + "model.layers.23.post_attention_layernorm.weight", + "model.layers.24.input_layernorm.weight", + "model.layers.24.post_attention_layernorm.weight", + "model.layers.25.input_layernorm.weight", + "model.layers.25.post_attention_layernorm.weight", + "model.layers.26.input_layernorm.weight", + "model.layers.26.post_attention_layernorm.weight", + "model.layers.27.input_layernorm.weight", + "model.layers.27.post_attention_layernorm.weight", + "model.layers.28.input_layernorm.weight", + "model.layers.28.post_attention_layernorm.weight", + "model.layers.29.input_layernorm.weight", + "model.layers.29.post_attention_layernorm.weight", + "model.layers.30.input_layernorm.weight", + "model.layers.30.post_attention_layernorm.weight", + "model.layers.31.input_layernorm.weight", + "model.layers.31.post_attention_layernorm.weight", + "model.embed_tokens.weight", + "model.norm.weight", + "lm_head.weight" + ], + "nbits_per_codebook": 8, + "num_codebooks": 2, + "out_group_size": 1, + "quant_method": "aqlm" + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 
0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|piqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715687560.163722, + "config": { + "model": "hf", + "model_args": "pretrained=ISTA-DASLab/Mistral-7B-Instruct-v0.2-AQLM-2Bit-2x8,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + 
"use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/ISTA-DASLab/results_2024-05-15-05-17-12.json b/ISTA-DASLab/results_2024-05-15-05-17-12.json new file mode 100644 index 0000000000000000000000000000000000000000..2743165091cdf3885ec16eb30e5edf3dc139892f --- /dev/null +++ b/ISTA-DASLab/results_2024-05-15-05-17-12.json @@ -0,0 +1,651 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-15-05-17-12", + "total_evaluation_time_secondes": "", + "model_name": "ISTA-DASLab/Meta-Llama-3-8B-Instruct-AQLM-2Bit-1x16", + "model_sha": "", + "model_dtype": "2bit", + "model_size": 4.08, + "model_params": 7, + "quant_type": "AQLM", + "precision": "2bit" + }, + "results": { + "harness|arc:easy|0": { + "acc,none": 0.7693602693602694, + "acc_stderr,none": 0.008643708884504999, + "acc_norm,none": 0.742003367003367, + "acc_norm_stderr,none": 0.008977970005203405, + "alias": "arc_easy" + }, + "harness|openbookqa|0": { + "acc,none": 0.322, + "acc_stderr,none": 0.020916668330019882, + "acc_norm,none": 0.396, + "acc_norm_stderr,none": 0.021893529941665817, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.501517122289186, + "acc_stderr,none": 0.015111724525572797, + "alias": "truthfulqa_mc2" + }, + "harness|mmlu|0": { + "acc,none": 0.5641646489104116, + "acc_stderr,none": 0.0039754730963606965, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5175345377258236, + "acc_stderr,none": 0.006842487140618199 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.47619047619047616, + "acc_stderr,none": 0.04467062628403273 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7272727272727273, + "acc_stderr,none": 0.03477691162163659 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7450980392156863, + "acc_stderr,none": 0.030587591351604246 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7468354430379747, + "acc_stderr,none": 0.028304657943035293 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7024793388429752, + "acc_stderr,none": 0.04173349148083499 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7037037037037037, + "acc_stderr,none": 0.04414343666854933 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6871165644171779, + "acc_stderr,none": 0.03642914578292405 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6416184971098265, + "acc_stderr,none": 0.025816756791584204 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.25251396648044694, + "acc_stderr,none": 0.01453033020146865 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6591639871382636, + "acc_stderr,none": 0.026920841260776155 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6450617283950617, + "acc_stderr,none": 0.02662415247884585 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.43415906127770537, + "acc_stderr,none": 
0.01265903323706725 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7309941520467836, + "acc_stderr,none": 0.0340105262010409 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6501448342452526, + "acc_stderr,none": 0.008324635072589546 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.64, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6792452830188679, + "acc_stderr,none": 0.02872750295788026 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5780346820809249, + "acc_stderr,none": 0.0376574669386515 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6457399103139013, + "acc_stderr,none": 0.03210062154134987 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7669902912621359, + "acc_stderr,none": 0.04185832598928315 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8247863247863247, + "acc_stderr,none": 0.02490443909891822 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7522349936143039, + "acc_stderr,none": 0.015438083080568963 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6339869281045751, + "acc_stderr,none": 0.027582811415159617 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4787234042553192, + "acc_stderr,none": 0.029800481645628693 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.5845588235294118, + "acc_stderr,none": 0.029935342707877746 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.463855421686747, + "acc_stderr,none": 0.038823108508905954 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6499837504062398, + "acc_stderr,none": 0.00839741312665264 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.37719298245614036, + "acc_stderr,none": 0.04559522141958216 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.03191178226713547 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7772020725388601, + "acc_stderr,none": 0.030031147977641538 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.541025641025641, + "acc_stderr,none": 0.025265525491284295 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.5882352941176471, + "acc_stderr,none": 0.031968769891957786 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7577981651376147, + "acc_stderr,none": 0.01836817630659862 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7175572519083969, + "acc_stderr,none": 0.03948406125768361 + }, + 
"harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5686274509803921, + "acc_stderr,none": 0.020036393768352635 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6, + "acc_stderr,none": 0.0469237132203465 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.636734693877551, + "acc_stderr,none": 0.030789051139030806 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7711442786069652, + "acc_stderr,none": 0.029705284056772436 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.81, + "acc_stderr,none": 0.03942772444036624 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4652711703139867, + "acc_stderr,none": 0.008646280447225355 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5703703703703704, + "acc_stderr,none": 0.04276349494376599 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5526315789473685, + "acc_stderr,none": 0.04046336883978251 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6388888888888888, + "acc_stderr,none": 0.04016660030451233 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.45, + "acc_stderr,none": 0.05 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3431372549019608, + "acc_stderr,none": 0.04724007352383889 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.4425531914893617, + "acc_stderr,none": 0.03246956919789958 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5241379310344828, + "acc_stderr,none": 0.0416180850350153 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.3941798941798942, + "acc_stderr,none": 0.02516798233389414 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6741935483870968, + "acc_stderr,none": 0.0266620105785671 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.41379310344827586, + "acc_stderr,none": 0.03465304488406796 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.6, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3074074074074074, + "acc_stderr,none": 0.028133252578815642 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.4105960264900662, + "acc_stderr,none": 0.04016689594849927 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 
0.41203703703703703, + "acc_stderr,none": 0.03356787758160835 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.38392857142857145, + "acc_stderr,none": 0.04616143075028547 + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.33047735618115054, + "acc_stderr,none": 0.016466769613698314, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.8033639143730887, + "acc_stderr,none": 0.006951528536402847, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.7742110990206746, + "acc_stderr,none": 0.00975498067091734, + "acc_norm,none": 0.7731229597388466, + "acc_norm_stderr,none": 0.009771584259215153, + "alias": "piqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.45819112627986347, + "acc_stderr,none": 0.014560220308714697, + "acc_norm,none": 0.49146757679180886, + "acc_norm_stderr,none": 0.014609263165632191, + "alias": "arc_challenge" + }, + "harness|hellaswag|0": { + "acc,none": 0.5487950607448715, + "acc_stderr,none": 0.004965963647210322, + "acc_norm,none": 0.7263493328022307, + "acc_norm_stderr,none": 0.0044492062959223195, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.696921862667719, + "acc_stderr,none": 0.01291672746263447, + "alias": "winogrande" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.721237013130463, + "perplexity_stderr,none": 0.0953372165080242, + "acc,none": 0.6904715699592471, + "acc_stderr,none": 0.006440732259116663, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "ISTA-DASLab/Meta-Llama-3-8B-Instruct-AQLM-2Bit-1x16", + "revision": "main", + "private": false, + "params": 7, + "architectures": "LlamaForCausalLM", + "quant_type": "AQLM", + "precision": "2bit", + "model_params": 7, + "model_size": 4.08, + "weight_dtype": "int2", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-13T11:54:45Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "in_group_size": 8, + "linear_weights_not_to_quantize": [ + "model.layers.0.input_layernorm.weight", + "model.layers.0.post_attention_layernorm.weight", + "model.layers.1.input_layernorm.weight", + "model.layers.1.post_attention_layernorm.weight", + "model.layers.2.input_layernorm.weight", + "model.layers.2.post_attention_layernorm.weight", + "model.layers.3.input_layernorm.weight", + "model.layers.3.post_attention_layernorm.weight", + "model.layers.4.input_layernorm.weight", + "model.layers.4.post_attention_layernorm.weight", + "model.layers.5.input_layernorm.weight", + "model.layers.5.post_attention_layernorm.weight", + "model.layers.6.input_layernorm.weight", + "model.layers.6.post_attention_layernorm.weight", + "model.layers.7.input_layernorm.weight", + "model.layers.7.post_attention_layernorm.weight", + "model.layers.8.input_layernorm.weight", + "model.layers.8.post_attention_layernorm.weight", + "model.layers.9.input_layernorm.weight", + "model.layers.9.post_attention_layernorm.weight", + "model.layers.10.input_layernorm.weight", + "model.layers.10.post_attention_layernorm.weight", + "model.layers.11.input_layernorm.weight", + "model.layers.11.post_attention_layernorm.weight", + "model.layers.12.input_layernorm.weight", + "model.layers.12.post_attention_layernorm.weight", + "model.layers.13.input_layernorm.weight", + "model.layers.13.post_attention_layernorm.weight", + "model.layers.14.input_layernorm.weight", + 
"model.layers.14.post_attention_layernorm.weight", + "model.layers.15.input_layernorm.weight", + "model.layers.15.post_attention_layernorm.weight", + "model.layers.16.input_layernorm.weight", + "model.layers.16.post_attention_layernorm.weight", + "model.layers.17.input_layernorm.weight", + "model.layers.17.post_attention_layernorm.weight", + "model.layers.18.input_layernorm.weight", + "model.layers.18.post_attention_layernorm.weight", + "model.layers.19.input_layernorm.weight", + "model.layers.19.post_attention_layernorm.weight", + "model.layers.20.input_layernorm.weight", + "model.layers.20.post_attention_layernorm.weight", + "model.layers.21.input_layernorm.weight", + "model.layers.21.post_attention_layernorm.weight", + "model.layers.22.input_layernorm.weight", + "model.layers.22.post_attention_layernorm.weight", + "model.layers.23.input_layernorm.weight", + "model.layers.23.post_attention_layernorm.weight", + "model.layers.24.input_layernorm.weight", + "model.layers.24.post_attention_layernorm.weight", + "model.layers.25.input_layernorm.weight", + "model.layers.25.post_attention_layernorm.weight", + "model.layers.26.input_layernorm.weight", + "model.layers.26.post_attention_layernorm.weight", + "model.layers.27.input_layernorm.weight", + "model.layers.27.post_attention_layernorm.weight", + "model.layers.28.input_layernorm.weight", + "model.layers.28.post_attention_layernorm.weight", + "model.layers.29.input_layernorm.weight", + "model.layers.29.post_attention_layernorm.weight", + "model.layers.30.input_layernorm.weight", + "model.layers.30.post_attention_layernorm.weight", + "model.layers.31.input_layernorm.weight", + "model.layers.31.post_attention_layernorm.weight", + "model.embed_tokens.weight", + "model.norm.weight", + "lm_head.weight" + ], + "nbits_per_codebook": 16, + "num_codebooks": 1, + "out_group_size": 1, + "quant_method": "aqlm" + }, + "versions": { + "harness|arc:easy|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + 
"harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715715118.262416, + "config": { + "model": "hf", + "model_args": 
"pretrained=ISTA-DASLab/Meta-Llama-3-8B-Instruct-AQLM-2Bit-1x16,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/ISTA-DASLab/results_2024-05-19-14-09-41.json b/ISTA-DASLab/results_2024-05-19-14-09-41.json new file mode 100644 index 0000000000000000000000000000000000000000..33596b51cb39da53013ddcc2bea80f85d2546fe8 --- /dev/null +++ b/ISTA-DASLab/results_2024-05-19-14-09-41.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-19-14-09-41", + "total_evaluation_time_secondes": "", + "model_name": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-1x16-hf", + "model_sha": "", + "model_dtype": "2bit", + "model_size": 2.38, + "model_params": 6.48, + "quant_type": "AQLM", + "precision": "2bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.5342561242780323, + "acc_stderr,none": 0.004978056798794863, + "acc_norm,none": 0.7136028679545907, + "acc_norm_stderr,none": 0.004511533039406169, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.36398040864303677, + "acc_stderr,none": 0.013782783819030715, + "alias": "truthfulqa_mc2" + }, + "harness|openbookqa|0": { + "acc,none": 0.306, + "acc_stderr,none": 0.02062956999834541, + "acc_norm,none": 0.412, + "acc_norm_stderr,none": 0.02203367799374086, + "alias": "openbookqa" + }, + "harness|winogrande|0": { + "acc,none": 0.654301499605367, + "acc_stderr,none": 0.01336659695193438, + "alias": "winogrande" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.091725727763611, + "perplexity_stderr,none": 0.08814292215669535, + "acc,none": 0.7083252474286823, + "acc_stderr,none": 0.006332538704566833, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2350061199510404, + "acc_stderr,none": 0.014843061507731608, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 0.3503774391112377, + "acc_stderr,none": 0.003987221774638447, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.3477151965993624, + "acc_stderr,none": 0.006839503265838798 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.30158730158730157, + "acc_stderr,none": 0.04104947269903394 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.45454545454545453, + "acc_stderr,none": 0.038881769216741004 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.4166666666666667, + "acc_stderr,none": 0.034602283272391704 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.4008438818565401, + "acc_stderr,none": 0.031900803894732356 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.5454545454545454, + "acc_stderr,none": 0.04545454545454548 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.37037037037037035, + "acc_stderr,none": 0.04668408033024931 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.4294478527607362, + "acc_stderr,none": 0.03889066619112722 + }, + "harness|mmlu_moral_disputes|0": { + 
"alias": " - moral_disputes", + "acc,none": 0.407514450867052, + "acc_stderr,none": 0.026454578146931505 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23687150837988827, + "acc_stderr,none": 0.014219570788103982 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.41479099678456594, + "acc_stderr,none": 0.02798268045975956 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.4444444444444444, + "acc_stderr,none": 0.027648477877413324 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.2966101694915254, + "acc_stderr,none": 0.011665946586082844 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.5029239766081871, + "acc_stderr,none": 0.03834759370936839 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.35854522046990667, + "acc_stderr,none": 0.008558316270165067 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.35471698113207545, + "acc_stderr,none": 0.02944517532819959 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.31213872832369943, + "acc_stderr,none": 0.035331333893236574 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.3183856502242152, + "acc_stderr,none": 0.03126580522513713 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.36893203883495146, + "acc_stderr,none": 0.04777615181156739 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.44017094017094016, + "acc_stderr,none": 0.032520741720630506 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.4061302681992337, + "acc_stderr,none": 0.017562037406478912 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.4215686274509804, + "acc_stderr,none": 0.028275490156791434 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.26595744680851063, + "acc_stderr,none": 0.026358065698880592 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.3161764705882353, + "acc_stderr,none": 0.028245687391462916 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.26506024096385544, + "acc_stderr,none": 0.03436024037944967 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.37146571335716605, + "acc_stderr,none": 0.008625018116535118 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.24561403508771928, + "acc_stderr,none": 0.040493392977481425 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.3888888888888889, + "acc_stderr,none": 0.0347327959083696 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.47668393782383417, + "acc_stderr,none": 0.03604513672442206 + }, + 
"harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.3, + "acc_stderr,none": 0.023234581088428494 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.3235294117647059, + "acc_stderr,none": 0.030388353551886845 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.3926605504587156, + "acc_stderr,none": 0.020937505161201093 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.3893129770992366, + "acc_stderr,none": 0.04276486542814591 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.3349673202614379, + "acc_stderr,none": 0.019094228167000307 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.2909090909090909, + "acc_stderr,none": 0.04350271442923243 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.35918367346938773, + "acc_stderr,none": 0.03071356045510849 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.527363184079602, + "acc_stderr,none": 0.035302355173346824 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.56, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.32572153504598794, + "acc_stderr,none": 0.00831768388964144 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.24, + "acc_stderr,none": 0.04292346959909284 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.3851851851851852, + "acc_stderr,none": 0.042039210401562783 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.3684210526315789, + "acc_stderr,none": 0.03925523381052932 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.3263888888888889, + "acc_stderr,none": 0.03921067198982266 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421255 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.39, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.20588235294117646, + "acc_stderr,none": 0.04023382273617748 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.44, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.31063829787234043, + "acc_stderr,none": 0.03025123757921317 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.41379310344827586, + "acc_stderr,none": 0.041042692118062316 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2857142857142857, + "acc_stderr,none": 0.02326651221373056 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.36451612903225805, + "acc_stderr,none": 0.027379871229943252 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - 
high_school_chemistry", + "acc,none": 0.35960591133004927, + "acc_stderr,none": 0.03376458246509567 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.29259259259259257, + "acc_stderr,none": 0.027738969632176088 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2913907284768212, + "acc_stderr,none": 0.03710185726119994 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.3611111111111111, + "acc_stderr,none": 0.03275773486101 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.25892857142857145, + "acc_stderr,none": 0.04157751539865629 + }, + "harness|arc:easy|0": { + "acc,none": 0.7403198653198653, + "acc_stderr,none": 0.008996990428562219, + "acc_norm,none": 0.7133838383838383, + "acc_norm_stderr,none": 0.009278551100969293, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.39590443686006827, + "acc_stderr,none": 0.014291228393536587, + "acc_norm,none": 0.41467576791808874, + "acc_norm_stderr,none": 0.014397070564409172, + "alias": "arc_challenge" + }, + "harness|piqa|0": { + "acc,none": 0.7687704026115343, + "acc_stderr,none": 0.009837063180625324, + "acc_norm,none": 0.7704026115342764, + "acc_norm_stderr,none": 0.009812682950815199, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.7155963302752294, + "acc_stderr,none": 0.00789031224598877, + "alias": "boolq" + } + }, + "task_info": { + "model": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-1x16-hf", + "revision": "main", + "private": false, + "params": 2.38, + "architectures": "LlamaForCausalLM", + "quant_type": "AQLM", + "precision": "2bit", + "model_params": 6.48, + "model_size": 2.38, + "weight_dtype": "int2", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-15T03:44:59Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "in_group_size": 8, + "linear_weights_not_to_quantize": [ + "model.embed_tokens.weight", + "lm_head.weight" + ], + "nbits_per_codebook": 16, + "num_codebooks": 1, + "out_group_size": 1, + "quant_method": "aqlm" + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + 
"harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + 
"mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1716092125.7070472, + "config": { + "model": "hf", + "model_args": "pretrained=ISTA-DASLab/Llama-2-7b-AQLM-2Bit-1x16-hf,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/ISTA-DASLab/results_2024-05-20-03-47-37.json b/ISTA-DASLab/results_2024-05-20-03-47-37.json new file mode 100644 index 0000000000000000000000000000000000000000..1c8f981ba0f934dcb6b3232ea94e8d30cf0d2a14 --- /dev/null +++ b/ISTA-DASLab/results_2024-05-20-03-47-37.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-20-03-47-37", + "total_evaluation_time_secondes": "", + "model_name": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-8x8-hf", + "model_sha": "", + "model_dtype": "2bit", + "model_size": 2.73, + "model_params": 6.48, + "quant_type": "AQLM", + "precision": "2bit" + }, + "results": { + "harness|openbookqa|0": { + "acc,none": 0.284, + "acc_stderr,none": 0.02018670369357085, + "acc_norm,none": 0.4, + "acc_norm_stderr,none": 0.021930844120728505, + "alias": "openbookqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.623725999863417, + "perplexity_stderr,none": 0.10657475064064027, + "acc,none": 0.6811566078012808, + "acc_stderr,none": 0.006492684061449838, + "alias": "lambada_openai" + }, + "harness|mmlu|0": { + "acc,none": 0.3007406352371457, + "acc_stderr,none": 0.003854246733008758, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.3026567481402763, + "acc_stderr,none": 0.006671499990771424 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.21428571428571427, + "acc_stderr,none": 0.03670066451047182 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.32727272727272727, + "acc_stderr,none": 0.03663974994391241 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.3627450980392157, + "acc_stderr,none": 0.03374499356319355 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.4008438818565401, + "acc_stderr,none": 0.031900803894732356 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.371900826446281, + "acc_stderr,none": 0.04412015806624503 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.35185185185185186, + "acc_stderr,none": 0.04616631111801715 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.3006134969325153, + "acc_stderr,none": 0.0360251131880677 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.3352601156069364, + "acc_stderr,none": 
0.025416003773165555 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23575418994413408, + "acc_stderr,none": 0.014196375686290804 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.3215434083601286, + "acc_stderr,none": 0.026527724079528872 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.3487654320987654, + "acc_stderr,none": 0.02651759772446501 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.2907431551499348, + "acc_stderr,none": 0.011598062372851981 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.32748538011695905, + "acc_stderr,none": 0.035993357714560276 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.3270035403926617, + "acc_stderr,none": 0.008393111920442035 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847415 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.33962264150943394, + "acc_stderr,none": 0.02914690474779833 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.2832369942196532, + "acc_stderr,none": 0.03435568056047873 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.26905829596412556, + "acc_stderr,none": 0.029763779406874975 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.2815533980582524, + "acc_stderr,none": 0.04453254836326469 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.3717948717948718, + "acc_stderr,none": 0.03166098891888078 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.3550446998722861, + "acc_stderr,none": 0.017112085772772994 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.3562091503267974, + "acc_stderr,none": 0.027420477662629235 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.30141843971631205, + "acc_stderr,none": 0.02737412888263115 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.23529411764705882, + "acc_stderr,none": 0.02576725201085597 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.37349397590361444, + "acc_stderr,none": 0.03765845117168862 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.30744231394215144, + "acc_stderr,none": 0.00829988495163527 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.04142439719489362 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.29797979797979796, + "acc_stderr,none": 0.032586303838365555 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.38341968911917096, + "acc_stderr,none": 0.03508984236295342 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + 
"acc,none": 0.2564102564102564, + "acc_stderr,none": 0.022139081103971527 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.28991596638655465, + "acc_stderr,none": 0.029472485833136077 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.344954128440367, + "acc_stderr,none": 0.020380605405066966 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.3511450381679389, + "acc_stderr,none": 0.04186445163013751 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.30392156862745096, + "acc_stderr,none": 0.01860755213127983 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.36363636363636365, + "acc_stderr,none": 0.04607582090719976 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.22857142857142856, + "acc_stderr,none": 0.026882144922307744 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.03333333333333334 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.2654614652711703, + "acc_stderr,none": 0.007843275093064069 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.2814814814814815, + "acc_stderr,none": 0.03885004245800254 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.28289473684210525, + "acc_stderr,none": 0.03665349695640767 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2569444444444444, + "acc_stderr,none": 0.03653946969442099 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.26, + "acc_stderr,none": 0.0440844002276808 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.20588235294117646, + "acc_stderr,none": 0.04023382273617747 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3276595744680851, + "acc_stderr,none": 0.030683020843230997 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.2620689655172414, + "acc_stderr,none": 0.036646663372252565 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.23809523809523808, + "acc_stderr,none": 0.02193587808118476 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.3064516129032258, + "acc_stderr,none": 0.026226485652553873 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.2315270935960591, + "acc_stderr,none": 
0.029678333141444455 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.25555555555555554, + "acc_stderr,none": 0.02659393910184408 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2052980132450331, + "acc_stderr,none": 0.03297986648473835 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.19444444444444445, + "acc_stderr,none": 0.026991454502036737 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.21428571428571427, + "acc_stderr,none": 0.03894641120044792 + }, + "harness|boolq|0": { + "acc,none": 0.7003058103975535, + "acc_stderr,none": 0.00801263880645437, + "alias": "boolq" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4019049189158086, + "acc_stderr,none": 0.014095106904013666, + "alias": "truthfulqa_mc2" + }, + "harness|arc:easy|0": { + "acc,none": 0.6641414141414141, + "acc_stderr,none": 0.009691180932083496, + "acc_norm,none": 0.6380471380471381, + "acc_norm_stderr,none": 0.009860991466688476, + "alias": "arc_easy" + }, + "harness|winogrande|0": { + "acc,none": 0.6471981057616417, + "acc_stderr,none": 0.01342972810178896, + "alias": "winogrande" + }, + "harness|hellaswag|0": { + "acc,none": 0.4977096195976897, + "acc_stderr,none": 0.004989729059957427, + "acc_norm,none": 0.6783509261103365, + "acc_norm_stderr,none": 0.004661544991583015, + "alias": "hellaswag" + }, + "harness|piqa|0": { + "acc,none": 0.7377584330794341, + "acc_stderr,none": 0.01026250256517245, + "acc_norm,none": 0.7480957562568009, + "acc_norm_stderr,none": 0.010128421335088683, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.015415241740237014, + "alias": "truthfulqa_mc1" + }, + "harness|arc:challenge|0": { + "acc,none": 0.3583617747440273, + "acc_stderr,none": 0.014012883334859864, + "acc_norm,none": 0.38054607508532423, + "acc_norm_stderr,none": 0.01418827771234983, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-8x8-hf", + "revision": "main", + "private": false, + "params": 2.73, + "architectures": "LlamaForCausalLM", + "quant_type": "AQLM", + "precision": "2bit", + "model_params": 6.48, + "model_size": 2.73, + "weight_dtype": "int2", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-15T03:43:56Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "in_group_size": 32, + "linear_weights_not_to_quantize": [ + "model.embed_tokens.weight", + "lm_head.weight" + ], + "nbits_per_codebook": 8, + "num_codebooks": 8, + "out_group_size": 1, + "quant_method": "aqlm" + }, + "versions": { + "harness|openbookqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + 
"harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + 
"mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1716130571.6830513, + "config": { + "model": "hf", + "model_args": "pretrained=ISTA-DASLab/Llama-2-7b-AQLM-2Bit-8x8-hf,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/ISTA-DASLab/results_2024-05-20-10-51-30.json b/ISTA-DASLab/results_2024-05-20-10-51-30.json new file mode 100644 index 0000000000000000000000000000000000000000..99ba25a85b1327ccccc8209b277113593c840887 --- /dev/null +++ b/ISTA-DASLab/results_2024-05-20-10-51-30.json @@ -0,0 +1,592 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-20-10-51-30", + "total_evaluation_time_secondes": "", + "model_name": "ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.74, + "model_params": 7.04, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.76550598476605, + "acc_stderr,none": 0.009885203143240548, + "acc_norm,none": 0.7742110990206746, + "acc_norm_stderr,none": 0.009754980670917334, + "alias": "piqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5633339972117108, + "acc_stderr,none": 0.00494958956767891, + "acc_norm,none": 0.7466640111531567, + "acc_norm_stderr,none": 0.004340328204135108, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.7929292929292929, + "acc_stderr,none": 0.008314665023956551, + "acc_norm,none": 0.7643097643097643, + "acc_norm_stderr,none": 0.008709108323214466, + "alias": "arc_easy" + }, + "harness|winogrande|0": { + "acc,none": 0.7348066298342542, + "acc_stderr,none": 0.01240654946619286, + "alias": "winogrande" + }, + "harness|openbookqa|0": { + "acc,none": 0.334, + "acc_stderr,none": 0.021113492347743738, + "acc_norm,none": 0.428, + "acc_norm_stderr,none": 0.02214979066386193, + "alias": "openbookqa" + }, + "harness|boolq|0": { + "acc,none": 0.8269113149847095, + "acc_stderr,none": 0.006616927043886648, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.5061154188638777, + "perplexity_stderr,none": 0.10109082998573839, + "acc,none": 0.708131185717058, + "acc_stderr,none": 0.006333777168216385, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 
0.5034129692832765, + "acc_stderr,none": 0.014611050403244084, + "acc_norm,none": 0.5238907849829352, + "acc_norm_stderr,none": 0.014594701798071654, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3659730722154223, + "acc_stderr,none": 0.016862941684088383, + "alias": "truthfulqa_mc1" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5191254909055326, + "acc_stderr,none": 0.015156374303657972, + "alias": "truthfulqa_mc2" + }, + "harness|mmlu|0": { + "acc,none": 0.6071072496795328, + "acc_stderr,none": 0.003928160330453562, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5604675876726887, + "acc_stderr,none": 0.006861969378650451 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4444444444444444, + "acc_stderr,none": 0.04444444444444449 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7090909090909091, + "acc_stderr,none": 0.03546563019624336 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8137254901960784, + "acc_stderr,none": 0.027325470966716333 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8185654008438819, + "acc_stderr,none": 0.025085961144579658 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7520661157024794, + "acc_stderr,none": 0.03941897526516301 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.75, + "acc_stderr,none": 0.04186091791394607 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6932515337423313, + "acc_stderr,none": 0.036230899157241474 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6676300578034682, + "acc_stderr,none": 0.025361168749688235 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.33854748603351953, + "acc_stderr,none": 0.01582670009648135 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6688102893890675, + "acc_stderr,none": 0.026730620728004924 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6882716049382716, + "acc_stderr,none": 0.025773111169630433 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.47392438070404175, + "acc_stderr,none": 0.012752858346533133 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7426900584795322, + "acc_stderr,none": 0.03352799844161865 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6810428065658192, + "acc_stderr,none": 0.00813434488979903 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252607 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6754716981132075, + "acc_stderr,none": 0.028815615713432118 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.630057803468208, + "acc_stderr,none": 0.0368122963339432 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6367713004484304, + "acc_stderr,none": 
0.032277904428505 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7766990291262136, + "acc_stderr,none": 0.04123553189891431 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8675213675213675, + "acc_stderr,none": 0.022209309073165606 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.76, + "acc_stderr,none": 0.04292346959909283 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7701149425287356, + "acc_stderr,none": 0.015046301846691826 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6862745098039216, + "acc_stderr,none": 0.026568921015457166 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4858156028368794, + "acc_stderr,none": 0.02981549448368206 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6691176470588235, + "acc_stderr,none": 0.028582709753898428 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5180722891566265, + "acc_stderr,none": 0.03889951252827217 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7000324991875203, + "acc_stderr,none": 0.00808605616864146 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4298245614035088, + "acc_stderr,none": 0.04657047260594963 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7474747474747475, + "acc_stderr,none": 0.030954055470365914 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8341968911917098, + "acc_stderr,none": 0.026839845022314415 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5871794871794872, + "acc_stderr,none": 0.024962683564331803 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6386554621848739, + "acc_stderr,none": 0.031204691225150016 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7871559633027523, + "acc_stderr,none": 0.017549376389313694 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7404580152671756, + "acc_stderr,none": 0.03844876139785271 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6388888888888888, + "acc_stderr,none": 0.019431775677037317 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6818181818181818, + "acc_stderr,none": 0.044612721759105085 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7142857142857143, + "acc_stderr,none": 0.02892058322067558 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8159203980099502, + "acc_stderr,none": 0.027403859410786838 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.84, + "acc_stderr,none": 0.03684529491774709 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.513162067871868, + "acc_stderr,none": 0.008641503326837477 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + 
"harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6222222222222222, + "acc_stderr,none": 0.04188307537595853 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6447368421052632, + "acc_stderr,none": 0.03894734487013316 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6944444444444444, + "acc_stderr,none": 0.03852084696008534 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.47, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.43137254901960786, + "acc_stderr,none": 0.04928099597287534 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.72, + "acc_stderr,none": 0.04512608598542128 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.48936170212765956, + "acc_stderr,none": 0.03267862331014063 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6344827586206897, + "acc_stderr,none": 0.04013124195424385 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.42328042328042326, + "acc_stderr,none": 0.025446365634406783 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7129032258064516, + "acc_stderr,none": 0.02573654274559452 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4827586206896552, + "acc_stderr,none": 0.035158955511657 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.63, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3814814814814815, + "acc_stderr,none": 0.029616718927497593 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.45695364238410596, + "acc_stderr,none": 0.04067325174247443 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4861111111111111, + "acc_stderr,none": 0.03408655867977749 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.4375, + "acc_stderr,none": 0.04708567521880525 + } + }, + "task_info": { + "model": "ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit", + "revision": "main", + "private": false, + "params": 5.74, + "architectures": "LlamaForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 7.04, + "model_size": 5.74, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-16T08:11:55Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "checkpoint_format": "gptq", + "damp_percent": 0.01, + "desc_act": true, + "exllama_config": { + "version": 2 + }, + "group_size": 128, + "model_file_base_name": null, + "model_name_or_path": null, + 
"quant_method": "gptq", + "static_groups": false, + "sym": true, + "true_sequential": true, + "use_exllama": true + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + 
"mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1716167855.9616175, + "config": { + "model": "hf", + "model_args": "pretrained=ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/Intel/results_2024-04-30-16-11-38.json b/Intel/results_2024-04-30-16-11-38.json new file mode 100644 index 0000000000000000000000000000000000000000..fac1b8fe7ea6569ee0aaa8ca6f33a04ee69b44c5 --- /dev/null +++ b/Intel/results_2024-04-30-16-11-38.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-16-11-38", + "total_evaluation_time_secondes": "", + "model_name": "Intel/Mistral-7B-Instruct-v0.2-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.16, + "model_params": 7.04, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6594502792868876, + "acc_stderr,none": 0.015342480583463202, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5116279069767442, + "acc_stderr,none": 0.017498767175740084, + "alias": "truthfulqa_mc1" + }, + "harness|hellaswag|0": { + "acc,none": 0.6567416849233221, + "acc_stderr,none": 0.004738264944737176, + "acc_norm,none": 0.8312089225253934, + "acc_norm_stderr,none": 0.003738017734037969, + "alias": "hellaswag" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.434887262380849, + "perplexity_stderr,none": 0.07410456478433652, + "acc,none": 0.7089074325635553, + "acc_stderr,none": 0.0063288149295274675, + "alias": "lambada_openai" + }, + 
"harness|arc:challenge|0": { + "acc,none": 0.5537542662116041, + "acc_stderr,none": 0.014526705548539982, + "acc_norm,none": 0.5656996587030717, + "acc_norm_stderr,none": 0.014484703048857355, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.342, + "acc_stderr,none": 0.02123614719989926, + "acc_norm,none": 0.458, + "acc_norm_stderr,none": 0.02230396677426996, + "alias": "openbookqa" + }, + "harness|boolq|0": { + "acc,none": 0.8525993883792049, + "acc_stderr,none": 0.006200328377083518, + "alias": "boolq" + }, + "harness|arc:easy|0": { + "acc,none": 0.8143939393939394, + "acc_stderr,none": 0.007977770454202353, + "acc_norm,none": 0.7655723905723906, + "acc_norm_stderr,none": 0.008692920419348174, + "alias": "arc_easy" + }, + "harness|winogrande|0": { + "acc,none": 0.739542225730071, + "acc_stderr,none": 0.012334833671998292, + "alias": "winogrande" + }, + "harness|piqa|0": { + "acc,none": 0.8073993471164309, + "acc_stderr,none": 0.009200649707017573, + "acc_norm,none": 0.8106637649619152, + "acc_norm_stderr,none": 0.009140767676615017, + "alias": "piqa" + }, + "harness|mmlu|0": { + "acc,none": 0.5865973508047286, + "acc_stderr,none": 0.003952459169410318, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5385759829968119, + "acc_stderr,none": 0.006893694786566793 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.38095238095238093, + "acc_stderr,none": 0.043435254289490965 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7151515151515152, + "acc_stderr,none": 0.03524390844511781 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.75, + "acc_stderr,none": 0.03039153369274154 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7637130801687764, + "acc_stderr,none": 0.027652153144159256 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7355371900826446, + "acc_stderr,none": 0.040261875275912046 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6944444444444444, + "acc_stderr,none": 0.044531975073749834 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.754601226993865, + "acc_stderr,none": 0.03380939813943354 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6445086705202312, + "acc_stderr,none": 0.025770292082977243 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3675977653631285, + "acc_stderr,none": 0.01612554382355294 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6559485530546624, + "acc_stderr,none": 0.02698147804364803 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6697530864197531, + "acc_stderr,none": 0.026168298456732842 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.41460234680573665, + "acc_stderr,none": 0.012582597058908284 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8070175438596491, + "acc_stderr,none": 0.030267457554898465 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6598004505954297, + "acc_stderr,none": 0.008186771432404356 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - 
business_ethics", + "acc,none": 0.64, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6716981132075471, + "acc_stderr,none": 0.02890159361241178 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5664739884393064, + "acc_stderr,none": 0.03778621079092056 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6053811659192825, + "acc_stderr,none": 0.03280400504755291 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7281553398058253, + "acc_stderr,none": 0.044052680241409216 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8589743589743589, + "acc_stderr,none": 0.02280138253459753 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.67, + "acc_stderr,none": 0.047258156262526066 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7841634738186463, + "acc_stderr,none": 0.014711684386139956 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6535947712418301, + "acc_stderr,none": 0.027245613047215355 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.450354609929078, + "acc_stderr,none": 0.029680105565029036 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6654411764705882, + "acc_stderr,none": 0.02866199620233531 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4457831325301205, + "acc_stderr,none": 0.03869543323472101 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.68020799480013, + "acc_stderr,none": 0.00818336991166656 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.42105263157894735, + "acc_stderr,none": 0.046446020912223177 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7626262626262627, + "acc_stderr,none": 0.03031371053819889 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7927461139896373, + "acc_stderr,none": 0.029252823291803638 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.558974358974359, + "acc_stderr,none": 0.025174048384000752 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6428571428571429, + "acc_stderr,none": 0.031124619309328177 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7944954128440367, + "acc_stderr,none": 0.017324352325016015 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6793893129770993, + "acc_stderr,none": 0.04093329229834278 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5816993464052288, + "acc_stderr,none": 0.019955975145835546 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.7, + "acc_stderr,none": 0.04389311454644287 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 
0.6693877551020408, + "acc_stderr,none": 0.0301164262965406 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8308457711442786, + "acc_stderr,none": 0.02650859065623325 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.84, + "acc_stderr,none": 0.03684529491774709 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.49476688867745006, + "acc_stderr,none": 0.008669668588404067 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5925925925925926, + "acc_stderr,none": 0.04244633238353228 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6118421052631579, + "acc_stderr,none": 0.03965842097512744 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6388888888888888, + "acc_stderr,none": 0.04016660030451233 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.44, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.53, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4411764705882353, + "acc_stderr,none": 0.049406356306056595 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252609 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.4978723404255319, + "acc_stderr,none": 0.03268572658667492 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5793103448275863, + "acc_stderr,none": 0.04113914981189261 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.3994708994708995, + "acc_stderr,none": 0.02522545028406788 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6806451612903226, + "acc_stderr,none": 0.026522709674667768 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4729064039408867, + "acc_stderr,none": 0.035128190778761066 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.62, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.31851851851851853, + "acc_stderr,none": 0.02840653309060846 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3443708609271523, + "acc_stderr,none": 0.03879687024073327 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.49074074074074076, + "acc_stderr,none": 0.034093869469927006 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5089285714285714, + "acc_stderr,none": 0.04745033255489123 + } + }, + "task_info": { + "model": "Intel/Mistral-7B-Instruct-v0.2-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 7, + 
"architectures": "MistralForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7, + "model_size": 4.524, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.001, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float32", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|piqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 
0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714460111.0164344, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/Mistral-7B-Instruct-v0.2-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-04-30-18-06-33.json b/Intel/results_2024-04-30-18-06-33.json new file mode 100644 index 0000000000000000000000000000000000000000..148d5725a3526ad8a338900462a281fc6a330df3 --- /dev/null +++ b/Intel/results_2024-04-30-18-06-33.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-18-06-33", + "total_evaluation_time_secondes": "", + "model_name": "Intel/opt-13b-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 7.6, + "model_params": 12.7, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 
0.3409183456381729, + "acc_stderr,none": 0.013323058554365342, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.20563035495716034, + "acc_stderr,none": 0.014148482219460969, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.6801223241590214, + "acc_stderr,none": 0.00815789330083753, + "alias": "boolq" + }, + "harness|hellaswag|0": { + "acc,none": 0.5177255526787492, + "acc_stderr,none": 0.004986644894743126, + "acc_norm,none": 0.6910973909579765, + "acc_norm_stderr,none": 0.004610966122378296, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.6448303078137332, + "acc_stderr,none": 0.013450047479569256, + "alias": "winogrande" + }, + "harness|mmlu|0": { + "acc,none": 0.24562028201110953, + "acc_stderr,none": 0.0036317764545446356, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.24208289054197663, + "acc_stderr,none": 0.0062440898985705465 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.30952380952380953, + "acc_stderr,none": 0.04134913018303316 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.23030303030303031, + "acc_stderr,none": 0.03287666758603488 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.23039215686274508, + "acc_stderr,none": 0.02955429260569507 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.2109704641350211, + "acc_stderr,none": 0.02655837250266192 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.1652892561983471, + "acc_stderr,none": 0.03390780612972776 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.24074074074074073, + "acc_stderr,none": 0.041331194402438376 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.22699386503067484, + "acc_stderr,none": 0.03291099578615769 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.2832369942196532, + "acc_stderr,none": 0.024257901705323374 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574885 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.22508038585209003, + "acc_stderr,none": 0.023720088516179034 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.24691358024691357, + "acc_stderr,none": 0.023993501709042117 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.24315514993481094, + "acc_stderr,none": 0.010956556654417362 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.2807017543859649, + "acc_stderr,none": 0.034462962170884265 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.2378500160926939, + "acc_stderr,none": 0.007637855403720031 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.2188679245283019, + "acc_stderr,none": 0.025447863825108614 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.24277456647398843, + 
"acc_stderr,none": 0.0326926380614177 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.19, + "acc_stderr,none": 0.039427724440366234 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.273542600896861, + "acc_stderr,none": 0.029918586707798827 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.22330097087378642, + "acc_stderr,none": 0.04123553189891431 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.2264957264957265, + "acc_stderr,none": 0.027421007295392912 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.24521072796934865, + "acc_stderr,none": 0.015384352284543932 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.24836601307189543, + "acc_stderr,none": 0.02473998135511359 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.2198581560283688, + "acc_stderr,none": 0.024706141070705477 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.18382352941176472, + "acc_stderr,none": 0.02352924218519311 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.2710843373493976, + "acc_stderr,none": 0.034605799075530276 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.2551186220344491, + "acc_stderr,none": 0.007849877136827197 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.22807017543859648, + "acc_stderr,none": 0.03947152782669415 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.26262626262626265, + "acc_stderr,none": 0.03135305009533084 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.25906735751295334, + "acc_stderr,none": 0.0316187791793541 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.3128205128205128, + "acc_stderr,none": 0.023507579020645368 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.2184873949579832, + "acc_stderr,none": 0.026841514322958934 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.23669724770642203, + "acc_stderr,none": 0.018224078117299074 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2824427480916031, + "acc_stderr,none": 0.03948406125768362 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.22712418300653595, + "acc_stderr,none": 0.016949853279212383 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.24545454545454545, + "acc_stderr,none": 0.041220665028782834 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.23673469387755103, + "acc_stderr,none": 0.02721283588407315 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.2935323383084577, + "acc_stderr,none": 0.03220024104534205 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.34, + "acc_stderr,none": 
0.04760952285695236 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.24928639391056137, + "acc_stderr,none": 0.007710745946445535 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.21, + "acc_stderr,none": 0.04093601807403326 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.2074074074074074, + "acc_stderr,none": 0.03502553170678318 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.26973684210526316, + "acc_stderr,none": 0.036117805602848975 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2638888888888889, + "acc_stderr,none": 0.03685651095897532 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.23, + "acc_stderr,none": 0.04229525846816505 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.23, + "acc_stderr,none": 0.04229525846816506 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.2549019607843137, + "acc_stderr,none": 0.04336432707993177 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.29, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.23404255319148937, + "acc_stderr,none": 0.02767845257821239 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.2482758620689655, + "acc_stderr,none": 0.03600105692727771 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2328042328042328, + "acc_stderr,none": 0.02176596167215453 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.2838709677419355, + "acc_stderr,none": 0.025649381063029244 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.21182266009852216, + "acc_stderr,none": 0.02874898368994107 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.23, + "acc_stderr,none": 0.042295258468165065 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.25555555555555554, + "acc_stderr,none": 0.026593939101844065 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2980132450331126, + "acc_stderr,none": 0.03734535676787198 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.25, + "acc_stderr,none": 0.029531221160930918 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3125, + "acc_stderr,none": 0.043994650575715215 + }, + "harness|arc:easy|0": { + "acc,none": 0.6717171717171717, + "acc_stderr,none": 0.009635749509262163, + "acc_norm,none": 0.6119528619528619, + "acc_norm_stderr,none": 0.009999295905750659, + "alias": "arc_easy" + }, + "harness|openbookqa|0": { + "acc,none": 0.278, + "acc_stderr,none": 0.02005583388807089, + "acc_norm,none": 0.382, + "acc_norm_stderr,none": 0.021750820591250834, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.7573449401523396, + "acc_stderr,none": 
0.01000200256970869, + "acc_norm,none": 0.764961915125136, + "acc_norm_stderr,none": 0.009893146688805345, + "alias": "piqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.8881756480152463, + "perplexity_stderr,none": 0.08322629498973114, + "acc,none": 0.6949349893266059, + "acc_stderr,none": 0.0064147592507735746, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.3250853242320819, + "acc_stderr,none": 0.013688147309729117, + "acc_norm,none": 0.3532423208191126, + "acc_norm_stderr,none": 0.013967822714840053, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "Intel/opt-13b-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 13, + "architectures": "OptForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 13, + "model_size": 8, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.002, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float32", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": false + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + 
"harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714465126.9301486, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/opt-13b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-04-30-18-42-01.json b/Intel/results_2024-04-30-18-42-01.json new file mode 
100644 index 0000000000000000000000000000000000000000..f0b0e1600fa02b9a2dd89453d359276f64b2b8f6 --- /dev/null +++ b/Intel/results_2024-04-30-18-42-01.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-18-42-01", + "total_evaluation_time_secondes": "", + "model_name": "Intel/opt-1.3b-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 1.05, + "model_params": 1.22, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|lambada:openai|0": { + "perplexity,none": 8.27607614433492, + "perplexity_stderr,none": 0.22453245796912197, + "acc,none": 0.5389093731806714, + "acc_stderr,none": 0.006944853492951909, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.214, + "acc_stderr,none": 0.018359797502387035, + "acc_norm,none": 0.326, + "acc_norm_stderr,none": 0.020984009562393557, + "alias": "openbookqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.5673400673400674, + "acc_stderr,none": 0.01016630793264287, + "acc_norm,none": 0.5050505050505051, + "acc_norm_stderr,none": 0.010259260102565879, + "alias": "arc_easy" + }, + "harness|winogrande|0": { + "acc,none": 0.584846093133386, + "acc_stderr,none": 0.013848684086658587, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.23378212974296206, + "acc_stderr,none": 0.01481619599193159, + "alias": "truthfulqa_mc1" + }, + "harness|arc:challenge|0": { + "acc,none": 0.23208191126279865, + "acc_stderr,none": 0.012336718284948856, + "acc_norm,none": 0.28668941979522183, + "acc_norm_stderr,none": 0.013214986329274777, + "alias": "arc_challenge" + }, + "harness|piqa|0": { + "acc,none": 0.7067464635473341, + "acc_stderr,none": 0.010621818421101924, + "acc_norm,none": 0.7100108813928183, + "acc_norm_stderr,none": 0.010586899128169326, + "alias": "piqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.4075881298546106, + "acc_stderr,none": 0.004903815885983278, + "acc_norm,none": 0.5202150965943039, + "acc_norm_stderr,none": 0.004985701593898005, + "alias": "hellaswag" + }, + "harness|mmlu|0": { + "acc,none": 0.25096140150975643, + "acc_stderr,none": 0.003654540601790187, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.2503719447396387, + "acc_stderr,none": 0.006315777980926855 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2777777777777778, + "acc_stderr,none": 0.04006168083848877 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.21212121212121213, + "acc_stderr,none": 0.03192271569548299 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.25980392156862747, + "acc_stderr,none": 0.03077855467869326 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.24472573839662448, + "acc_stderr,none": 0.02798569938703642 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.2809917355371901, + "acc_stderr,none": 0.04103203830514511 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.35185185185185186, + "acc_stderr,none": 0.046166311118017125 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 
0.25766871165644173, + "acc_stderr,none": 0.03436150827846917 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.2630057803468208, + "acc_stderr,none": 0.023703099525258165 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2435754189944134, + "acc_stderr,none": 0.01435591196476786 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.21543408360128619, + "acc_stderr,none": 0.02335022547547143 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.2808641975308642, + "acc_stderr,none": 0.02500646975579922 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.242503259452412, + "acc_stderr,none": 0.01094657096634879 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.2573099415204678, + "acc_stderr,none": 0.03352799844161865 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.2520115867396202, + "acc_stderr,none": 0.007772842543145439 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.23773584905660378, + "acc_stderr,none": 0.02619980880756192 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.23121387283236994, + "acc_stderr,none": 0.03214737302029468 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.34977578475336324, + "acc_stderr,none": 0.03200736719484503 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.2524271844660194, + "acc_stderr,none": 0.043012503996908764 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.24358974358974358, + "acc_stderr,none": 0.028120966503914404 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.25287356321839083, + "acc_stderr,none": 0.01554337731371968 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.21895424836601307, + "acc_stderr,none": 0.02367908986180772 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.2801418439716312, + "acc_stderr,none": 0.026789172351140228 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.17279411764705882, + "acc_stderr,none": 0.02296606758558177 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.2891566265060241, + "acc_stderr,none": 0.03529486801511115 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.2365940851478713, + "acc_stderr,none": 0.007656528011004042 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.22807017543859648, + "acc_stderr,none": 0.03947152782669415 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.23737373737373738, + "acc_stderr,none": 0.030313710538198896 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - 
high_school_government_and_politics", + "acc,none": 0.20207253886010362, + "acc_stderr,none": 0.02897908979429673 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.24358974358974358, + "acc_stderr,none": 0.021763733684173912 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.2184873949579832, + "acc_stderr,none": 0.02684151432295894 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.24036697247706423, + "acc_stderr,none": 0.01832060732096407 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.183206106870229, + "acc_stderr,none": 0.033927709264947335 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.28104575163398693, + "acc_stderr,none": 0.018185218954318082 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.2727272727272727, + "acc_stderr,none": 0.04265792110940589 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.1836734693877551, + "acc_stderr,none": 0.024789071332007636 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.23383084577114427, + "acc_stderr,none": 0.029929415408348387 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.2, + "acc_stderr,none": 0.04020151261036845 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.26482714874722485, + "acc_stderr,none": 0.007847018783828765 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.29, + "acc_stderr,none": 0.045604802157206824 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.34074074074074073, + "acc_stderr,none": 0.040943762699967946 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.03583496176361062 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.25, + "acc_stderr,none": 0.03621034121889507 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.11, + "acc_stderr,none": 0.031446603773522035 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.27, + "acc_stderr,none": 0.04461960433384741 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.24, + "acc_stderr,none": 0.042923469599092816 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.21568627450980393, + "acc_stderr,none": 0.04092563958237655 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.29, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.28085106382978725, + "acc_stderr,none": 0.02937917046412482 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.22758620689655173, + "acc_stderr,none": 0.03493950380131184 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.29894179894179895, + "acc_stderr,none": 0.023577604791655805 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 
0.2645161290322581, + "acc_stderr,none": 0.02509189237885928 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.3054187192118227, + "acc_stderr,none": 0.03240661565868408 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.24074074074074073, + "acc_stderr,none": 0.02606715922227579 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.25165562913907286, + "acc_stderr,none": 0.035433042343899844 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.23148148148148148, + "acc_stderr,none": 0.028765111718046955 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.26785714285714285, + "acc_stderr,none": 0.04203277291467764 + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.3799455888517004, + "acc_stderr,none": 0.014185571946003417, + "alias": "truthfulqa_mc2" + }, + "harness|boolq|0": { + "acc,none": 0.5785932721712538, + "acc_stderr,none": 0.008636344580414675, + "alias": "boolq" + } + }, + "task_info": { + "model": "Intel/opt-1.3b-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 1.3, + "architectures": "OptForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 1.3, + "model_size": 1.05, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.002, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float32", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": false + }, + "versions": { + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|piqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 
0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|boolq|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + 
"mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714471925.0680776, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/opt-1.3b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-04-30-20-44-29.json b/Intel/results_2024-04-30-20-44-29.json new file mode 100644 index 0000000000000000000000000000000000000000..66dcc707f812a7365c46cbfa60376bc14a280923 --- /dev/null +++ b/Intel/results_2024-04-30-20-44-29.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-20-44-29", + "total_evaluation_time_secondes": "", + "model_name": "Intel/bloom-7b1-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 6.8, + "model_params": 6.09, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.6369376479873717, + "acc_stderr,none": 0.013515191866479221, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2350061199510404, + "acc_stderr,none": 0.014843061507731606, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.6293577981651376, + "acc_stderr,none": 0.008447316806409933, + "alias": "boolq" + }, + "harness|arc:challenge|0": { + "acc,none": 0.3054607508532423, + "acc_stderr,none": 0.013460080478002514, + "acc_norm,none": 0.3361774744027304, + "acc_norm_stderr,none": 0.01380485502620576, + "alias": "arc_challenge" + }, + "harness|piqa|0": { + "acc,none": 0.7263329706202394, + "acc_stderr,none": 0.010402184206229204, + "acc_norm,none": 0.735582154515778, + "acc_norm_stderr,none": 0.010289787244767168, + "alias": "piqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.4618601872137024, + "acc_stderr,none": 0.004975243508752004, + "acc_norm,none": 0.619398526190002, + "acc_norm_stderr,none": 0.004845424524764082, + "alias": "hellaswag" + }, + "harness|lambada:openai|0": { + "perplexity,none": 6.685916235858979, + "perplexity_stderr,none": 0.17913874719457962, + "acc,none": 0.5728701727149234, + "acc_stderr,none": 0.006891601045518706, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.244, + "acc_stderr,none": 0.01922673489361459, + "acc_norm,none": 0.356, + "acc_norm_stderr,none": 0.021434712356072645, + "alias": "openbookqa" + }, + "harness|mmlu|0": { + "acc,none": 0.259792052414186, + "acc_stderr,none": 0.0036992967525030333, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.25696068012752393, + "acc_stderr,none": 0.006371837439598881 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.31746031746031744, + "acc_stderr,none": 0.04163453031302859 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.24242424242424243, + "acc_stderr,none": 0.03346409881055953 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 
0.24509803921568626, + "acc_stderr,none": 0.03019028245350194 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.26582278481012656, + "acc_stderr,none": 0.028756799629658335 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.2727272727272727, + "acc_stderr,none": 0.04065578140908705 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.23148148148148148, + "acc_stderr,none": 0.04077494709252628 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.2147239263803681, + "acc_stderr,none": 0.03226219377286774 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.2745664739884393, + "acc_stderr,none": 0.02402774515526502 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2435754189944134, + "acc_stderr,none": 0.014355911964767864 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.22186495176848875, + "acc_stderr,none": 0.023598858292863047 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.24382716049382716, + "acc_stderr,none": 0.023891879541959586 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.273142112125163, + "acc_stderr,none": 0.011380150567830398 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.25146198830409355, + "acc_stderr,none": 0.033275044238468436 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.24557450917283552, + "acc_stderr,none": 0.00770723810131652 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.27547169811320754, + "acc_stderr,none": 0.027495663683724067 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.24855491329479767, + "acc_stderr,none": 0.03295304696818318 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.17937219730941703, + "acc_stderr,none": 0.0257498195691928 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.30097087378640774, + "acc_stderr,none": 0.045416094465039476 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.2777777777777778, + "acc_stderr,none": 0.02934311479809447 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.227330779054917, + "acc_stderr,none": 0.014987270640946024 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.2875816993464052, + "acc_stderr,none": 0.02591780611714716 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.26595744680851063, + "acc_stderr,none": 0.026358065698880592 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.22426470588235295, + "acc_stderr,none": 0.025336848563332355 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + 
"acc,none": 0.1686746987951807, + "acc_stderr,none": 0.029152009627856544 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.27494312642183943, + "acc_stderr,none": 0.008053261916222852 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2543859649122807, + "acc_stderr,none": 0.040969851398436716 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.30808080808080807, + "acc_stderr,none": 0.03289477330098615 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.32124352331606215, + "acc_stderr,none": 0.033699508685490674 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.2564102564102564, + "acc_stderr,none": 0.022139081103971545 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.27310924369747897, + "acc_stderr,none": 0.028942004040998164 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.29541284403669726, + "acc_stderr,none": 0.019560619182976 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2366412213740458, + "acc_stderr,none": 0.03727673575596917 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.25326797385620914, + "acc_stderr,none": 0.01759348689536683 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.2818181818181818, + "acc_stderr,none": 0.04309118709946459 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.2979591836734694, + "acc_stderr,none": 0.02927956741106568 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.2537313432835821, + "acc_stderr,none": 0.030769444967296024 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.26324135743736127, + "acc_stderr,none": 0.007846675733198834 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.21481481481481482, + "acc_stderr,none": 0.035478541985608236 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.29605263157894735, + "acc_stderr,none": 0.037150621549989056 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.24305555555555555, + "acc_stderr,none": 0.03586879280080341 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.22549019607843138, + "acc_stderr,none": 0.041583075330832865 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.24, + 
"acc_stderr,none": 0.04292346959909283 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.32340425531914896, + "acc_stderr,none": 0.030579442773610334 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.25517241379310346, + "acc_stderr,none": 0.03632984052707842 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2566137566137566, + "acc_stderr,none": 0.022494510767503154 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.22580645161290322, + "acc_stderr,none": 0.023785577884181012 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.2660098522167488, + "acc_stderr,none": 0.031089826002937523 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.29, + "acc_stderr,none": 0.04560480215720684 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.22962962962962963, + "acc_stderr,none": 0.025644108639267624 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2980132450331126, + "acc_stderr,none": 0.03734535676787198 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.26851851851851855, + "acc_stderr,none": 0.030225226160012386 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.29464285714285715, + "acc_stderr,none": 0.043270409325787296 + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.3900684489421272, + "acc_stderr,none": 0.014022176384445112, + "alias": "truthfulqa_mc2" + }, + "harness|arc:easy|0": { + "acc,none": 0.6443602693602694, + "acc_stderr,none": 0.009822854395535487, + "acc_norm,none": 0.5732323232323232, + "acc_norm_stderr,none": 0.010149141043955643, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "Intel/bloom-7b1-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 7, + "architectures": "BloomForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7, + "model_size": 6.8, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.001, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float32", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|piqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + 
"harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + 
"mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714476866.6397374, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/bloom-7b1-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-04-30-21-43-07.json b/Intel/results_2024-04-30-21-43-07.json new file mode 100644 index 0000000000000000000000000000000000000000..92e05d18fa709d94f9f1c877b7b565c2313e5e1b --- /dev/null +++ b/Intel/results_2024-04-30-21-43-07.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-21-43-07", + "total_evaluation_time_secondes": "", + "model_name": "Intel/gpt-j-6b-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.59, + "model_params": 5.69, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|arc:easy|0": { + "acc,none": 0.6708754208754208, + "acc_stderr,none": 0.009642048058060989, + "acc_norm,none": 0.61489898989899, + "acc_norm_stderr,none": 0.009985214798737251, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.3567771778055868, + "acc_stderr,none": 0.01353660390992912, + "alias": "truthfulqa_mc2" + }, + "harness|boolq|0": { + "acc,none": 0.6510703363914373, + "acc_stderr,none": 0.008336340399970096, + "alias": "boolq" + }, + "harness|arc:challenge|0": { + "acc,none": 0.3430034129692833, + "acc_stderr,none": 0.013872423223718166, + "acc_norm,none": 0.36006825938566556, + "acc_norm_stderr,none": 0.014027516814585186, + "alias": "arc_challenge" + }, + "harness|hellaswag|0": { + "acc,none": 0.48974307906791475, + "acc_stderr,none": 0.004988731406780659, + "acc_norm,none": 0.6563433578968333, + "acc_norm_stderr,none": 0.004739575380508871, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.6424625098658248, + "acc_stderr,none": 0.013470007443920691, + "alias": "winogrande" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.081184717841156, + "perplexity_stderr,none": 0.08880031107310538, + "acc,none": 0.6844556568988939, + "acc_stderr,none": 0.006474629636371581, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.282, + "acc_stderr,none": 0.020143572847290788, + "acc_norm,none": 0.388, + 
"acc_norm_stderr,none": 0.021814300984787635, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.7448313384113167, + "acc_stderr,none": 0.010171571592521822, + "acc_norm,none": 0.7568008705114254, + "acc_norm_stderr,none": 0.010009611953858948, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.20930232558139536, + "acc_stderr,none": 0.01424121943478583, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 0.2750320467169919, + "acc_stderr,none": 0.0037576659952161065, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.24654622741764082, + "acc_stderr,none": 0.0062818343189083165 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.04216370213557835 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.23636363636363636, + "acc_stderr,none": 0.03317505930009179 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.25, + "acc_stderr,none": 0.03039153369274154 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.22362869198312235, + "acc_stderr,none": 0.027123298205229976 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.17355371900826447, + "acc_stderr,none": 0.03457272836917669 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.25, + "acc_stderr,none": 0.04186091791394607 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.22699386503067484, + "acc_stderr,none": 0.03291099578615769 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.2398843930635838, + "acc_stderr,none": 0.022989592543123567 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.24022346368715083, + "acc_stderr,none": 0.014288343803925319 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.2508038585209003, + "acc_stderr,none": 0.024619771956697168 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.27469135802469136, + "acc_stderr,none": 0.024836057868294677 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.2561929595827901, + "acc_stderr,none": 0.011149173153110582 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.1871345029239766, + "acc_stderr,none": 0.029913127232368053 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.276472481493402, + "acc_stderr,none": 0.008009070391301637 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.2943396226415094, + "acc_stderr,none": 0.028049186315695248 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.26011560693641617, + "acc_stderr,none": 0.033450369167889904 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.29596412556053814, + "acc_stderr,none": 0.030636591348699813 + }, + 
"harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.34951456310679613, + "acc_stderr,none": 0.047211885060971716 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.2863247863247863, + "acc_stderr,none": 0.02961432369045665 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.23116219667943805, + "acc_stderr,none": 0.01507552323810108 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.02699254433929724 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.2695035460992908, + "acc_stderr,none": 0.026469036818590627 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.23161764705882354, + "acc_stderr,none": 0.025626533803777562 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.30120481927710846, + "acc_stderr,none": 0.0357160923005348 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.3084172895677608, + "acc_stderr,none": 0.008289452693955061 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.24561403508771928, + "acc_stderr,none": 0.04049339297748142 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.2828282828282828, + "acc_stderr,none": 0.03208779558786752 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.3471502590673575, + "acc_stderr,none": 0.03435696168361355 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.34615384615384615, + "acc_stderr,none": 0.024121125416941183 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.35294117647058826, + "acc_stderr,none": 0.031041941304059274 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.3100917431192661, + "acc_stderr,none": 0.019830849684439752 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.3816793893129771, + "acc_stderr,none": 0.042607351576445594 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.22712418300653595, + "acc_stderr,none": 0.01694985327921238 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.3090909090909091, + "acc_stderr,none": 0.044262946482000985 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.3836734693877551, + "acc_stderr,none": 0.031130880396235922 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.3283582089552239, + "acc_stderr,none": 0.033206858897443244 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.28353948620361563, + "acc_stderr,none": 0.008028532611263519 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_anatomy|0": 
{ + "alias": " - anatomy", + "acc,none": 0.28888888888888886, + "acc_stderr,none": 0.0391545063041425 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.34210526315789475, + "acc_stderr,none": 0.038607315993160904 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2152777777777778, + "acc_stderr,none": 0.03437079344106134 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.27450980392156865, + "acc_stderr,none": 0.04440521906179325 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.27, + "acc_stderr,none": 0.04461960433384739 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3191489361702128, + "acc_stderr,none": 0.03047297336338007 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.296551724137931, + "acc_stderr,none": 0.03806142687309994 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2619047619047619, + "acc_stderr,none": 0.022644212615525214 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.25483870967741934, + "acc_stderr,none": 0.024790118459332208 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.31527093596059114, + "acc_stderr,none": 0.03269080871970186 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.24, + "acc_stderr,none": 0.04292346959909283 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2740740740740741, + "acc_stderr,none": 0.02719593480408563 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.31788079470198677, + "acc_stderr,none": 0.03802039760107903 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.25, + "acc_stderr,none": 0.029531221160930918 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.2857142857142857, + "acc_stderr,none": 0.04287858751340456 + } + }, + "task_info": { + "model": "Intel/gpt-j-6b-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 6, + "architectures": "GPTJForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 6, + "model_size": 4, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.002, + "model_file_base_name": 
"model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float16", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": false + }, + "versions": { + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|boolq|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + 
"mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714481307.9563165, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/gpt-j-6b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-04-30-23-05-41.json b/Intel/results_2024-04-30-23-05-41.json new file mode 100644 index 0000000000000000000000000000000000000000..5731ab2de9580abb09f36632e6700a6bf090d71e --- /dev/null +++ b/Intel/results_2024-04-30-23-05-41.json @@ -0,0 +1,580 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-23-05-41", + "total_evaluation_time_secondes": "", + "model_name": "Intel/falcon-7b-instruct-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4, + "model_params": 7, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 0.43856164323940094, + "acc_stderr,none": 0.014798984792732898, + "alias": "truthfulqa_mc2" + }, + "harness|winogrande|0": { + "acc,none": 0.65982636148382, + "acc_stderr,none": 0.013315218762417395, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.7201178451178452, + "acc_stderr,none": 0.00921207752465653, + "acc_norm,none": 0.6776094276094277, + "acc_norm_stderr,none": 0.009590672908157436, + "alias": "arc_easy" + }, + "harness|hellaswag|0": { + "acc,none": 0.5128460466042621, + "acc_stderr,none": 0.004988134303021786, + "acc_norm,none": 0.691894045010954, + "acc_norm_stderr,none": 0.004607669909914966, + "alias": "hellaswag" + }, + "harness|lambada:openai|0": { + 
"perplexity,none": 5.293340874409289, + "perplexity_stderr,none": 0.12325842419162542, + "acc,none": 0.6404036483601785, + "acc_stderr,none": 0.006685695764730398, + "alias": "lambada_openai" + }, + "harness|piqa|0": { + "acc,none": 0.7758433079434167, + "acc_stderr,none": 0.009729897956410032, + "acc_norm,none": 0.7818280739934712, + "acc_norm_stderr,none": 0.009636081958374381, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2864137086903305, + "acc_stderr,none": 0.01582614243950237, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.7033639143730887, + "acc_stderr,none": 0.007989039569104798, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.24818401937046006, + "acc_stderr,none": 0.0036400372036635735, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.24654622741764082, + "acc_stderr,none": 0.0062797290603445926 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2698412698412698, + "acc_stderr,none": 0.03970158273235172 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.24242424242424243, + "acc_stderr,none": 0.03346409881055953 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.27450980392156865, + "acc_stderr,none": 0.03132179803083292 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.29535864978902954, + "acc_stderr,none": 0.029696338713422893 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.30578512396694213, + "acc_stderr,none": 0.04205953933884124 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.3148148148148148, + "acc_stderr,none": 0.04489931073591312 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.2147239263803681, + "acc_stderr,none": 0.032262193772867744 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.26878612716763006, + "acc_stderr,none": 0.023868003262500114 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574882 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.1832797427652733, + "acc_stderr,none": 0.021974198848265795 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.25, + "acc_stderr,none": 0.02409347123262133 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.24119947848761408, + "acc_stderr,none": 0.010926496102034961 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.23391812865497075, + "acc_stderr,none": 0.03246721765117824 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.272288381074992, + "acc_stderr,none": 0.00796038235295151 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.22641509433962265, + "acc_stderr,none": 0.025757559893106723 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.21965317919075145, + "acc_stderr,none": 0.031568093627031744 + }, + "harness|mmlu_global_facts|0": 
{ + "alias": " - global_facts", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.36771300448430494, + "acc_stderr,none": 0.03236198350928276 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.2912621359223301, + "acc_stderr,none": 0.04498676320572922 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.27350427350427353, + "acc_stderr,none": 0.029202540153431173 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.2784163473818646, + "acc_stderr,none": 0.016028295188992462 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.27450980392156865, + "acc_stderr,none": 0.025553169991826517 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.2553191489361702, + "acc_stderr,none": 0.026011992930901992 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.19117647058823528, + "acc_stderr,none": 0.023886881922440335 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.3072289156626506, + "acc_stderr,none": 0.03591566797824664 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.23756906077348067, + "acc_stderr,none": 0.007677188667986144 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2543859649122807, + "acc_stderr,none": 0.040969851398436695 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.23737373737373738, + "acc_stderr,none": 0.03031371053819888 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.21243523316062177, + "acc_stderr,none": 0.02951928261681726 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.21794871794871795, + "acc_stderr,none": 0.02093244577446319 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.23109243697478993, + "acc_stderr,none": 0.02738140692786897 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.23853211009174313, + "acc_stderr,none": 0.01827257581023186 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2824427480916031, + "acc_stderr,none": 0.03948406125768361 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.24673202614379086, + "acc_stderr,none": 0.017440820367402497 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.2909090909090909, + "acc_stderr,none": 0.04350271442923243 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.19591836734693877, + "acc_stderr,none": 0.025409301953225678 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.24378109452736318, + "acc_stderr,none": 0.03036049015401465 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + 
"acc,none": 0.23723437995559785, + "acc_stderr,none": 0.007571103457514372 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.26, + "acc_stderr,none": 0.04408440022768078 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.24444444444444444, + "acc_stderr,none": 0.03712537833614866 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.18421052631578946, + "acc_stderr,none": 0.0315469804508223 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2708333333333333, + "acc_stderr,none": 0.03716177437566017 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.24509803921568626, + "acc_stderr,none": 0.042801058373643966 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3021276595744681, + "acc_stderr,none": 0.030017554471880554 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.2206896551724138, + "acc_stderr,none": 0.03455930201924812 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.23544973544973544, + "acc_stderr,none": 0.02185150982203171 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.23225806451612904, + "acc_stderr,none": 0.024022256130308235 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.21182266009852216, + "acc_stderr,none": 0.028748983689941075 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.24074074074074073, + "acc_stderr,none": 0.026067159222275794 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2185430463576159, + "acc_stderr,none": 0.03374235550425694 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.16203703703703703, + "acc_stderr,none": 0.02513045365226846 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.30357142857142855, + "acc_stderr,none": 0.04364226155841044 + }, + "harness|openbookqa|0": { + "acc,none": 0.31, + "acc_stderr,none": 0.0207040410217248, + "acc_norm,none": 0.412, + "acc_norm_stderr,none": 0.02203367799374086, + "alias": "openbookqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.40187713310580203, + "acc_stderr,none": 0.014327268614578278, + "acc_norm,none": 0.4274744027303754, + "acc_norm_stderr,none": 0.014456862944650652, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "Intel/falcon-7b-instruct-int4-inc", + "local": true, + "revision": "main", + "private": false, 
+ "params": 7, + "architectures": "FalconForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7, + "model_size": 4, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "quant_method": "GPTQ", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|openbookqa|0": 1.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + 
"lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714484834.062388, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/falcon-7b-instruct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-01-00-22-37.json b/Intel/results_2024-05-01-00-22-37.json new file mode 100644 index 0000000000000000000000000000000000000000..51e14dfc91375173eefe7a84cae0df591d247f91 --- /dev/null +++ b/Intel/results_2024-05-01-00-22-37.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-01-00-22-37", + "total_evaluation_time_secondes": "", + "model_name": "Intel/Qwen1.5-7B-Chat-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.86, + "model_params": 6.54, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5173507158944602, + "acc_stderr,none": 0.01586170239259832, + "alias": "truthfulqa_mc2" + }, + "harness|mmlu|0": { + "acc,none": 0.594715852442672, + "acc_stderr,none": 0.0039619613793425955, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5483528161530287, + "acc_stderr,none": 0.006915577304368975 + }, + "harness|mmlu_formal_logic|0": { + 
"alias": " - formal_logic", + "acc,none": 0.3888888888888889, + "acc_stderr,none": 0.04360314860077459 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7575757575757576, + "acc_stderr,none": 0.03346409881055953 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7647058823529411, + "acc_stderr,none": 0.02977177522814565 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7679324894514767, + "acc_stderr,none": 0.027479744550808514 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7024793388429752, + "acc_stderr,none": 0.04173349148083499 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7870370370370371, + "acc_stderr,none": 0.039578354719809784 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7116564417177914, + "acc_stderr,none": 0.03559039531617342 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.653179190751445, + "acc_stderr,none": 0.02562472399403046 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3675977653631285, + "acc_stderr,none": 0.016125543823552944 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6752411575562701, + "acc_stderr,none": 0.026596782287697046 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6388888888888888, + "acc_stderr,none": 0.02672586880910079 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4439374185136897, + "acc_stderr,none": 0.012689708167787686 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7543859649122807, + "acc_stderr,none": 0.0330140594698725 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6585130350820727, + "acc_stderr,none": 0.008229134682918844 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.65, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6641509433962264, + "acc_stderr,none": 0.029067220146644823 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5491329479768786, + "acc_stderr,none": 0.03794012674697029 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6322869955156951, + "acc_stderr,none": 0.03236198350928276 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7378640776699029, + "acc_stderr,none": 0.04354631077260595 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8547008547008547, + "acc_stderr,none": 0.023086635086841403 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7637292464878672, + "acc_stderr,none": 0.015190473717037484 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6895424836601307, + "acc_stderr,none": 0.026493033225145905 + }, + 
"harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.425531914893617, + "acc_stderr,none": 0.029494827600144363 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6323529411764706, + "acc_stderr,none": 0.029289413409403192 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333047 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6746831329216769, + "acc_stderr,none": 0.00823386269401441 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4298245614035088, + "acc_stderr,none": 0.046570472605949625 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7828282828282829, + "acc_stderr,none": 0.02937661648494564 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.772020725388601, + "acc_stderr,none": 0.030276909945178277 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5948717948717949, + "acc_stderr,none": 0.024890471769938142 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6470588235294118, + "acc_stderr,none": 0.031041941304059288 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7963302752293578, + "acc_stderr,none": 0.017266742087630807 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7175572519083969, + "acc_stderr,none": 0.03948406125768361 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5473856209150327, + "acc_stderr,none": 0.020136790918492523 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425464 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6857142857142857, + "acc_stderr,none": 0.02971932942241747 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7860696517412935, + "acc_stderr,none": 0.028996909693328906 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.79, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5229939739930225, + "acc_stderr,none": 0.008632328990636458 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.39, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5259259259259259, + "acc_stderr,none": 0.04313531696750575 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6513157894736842, + "acc_stderr,none": 0.038781398887976104 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6527777777777778, + "acc_stderr,none": 0.039812405437178615 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.57, + "acc_stderr,none": 0.049756985195624284 + }, + 
"harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.37254901960784315, + "acc_stderr,none": 0.04810840148082633 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.77, + "acc_stderr,none": 0.04229525846816505 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5531914893617021, + "acc_stderr,none": 0.0325005368436584 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5655172413793104, + "acc_stderr,none": 0.04130740879555498 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4656084656084656, + "acc_stderr,none": 0.02569032176249384 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7129032258064516, + "acc_stderr,none": 0.025736542745594525 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5369458128078818, + "acc_stderr,none": 0.035083705204426656 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34074074074074073, + "acc_stderr,none": 0.02889774874113114 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3841059602649007, + "acc_stderr,none": 0.03971301814719197 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5416666666666666, + "acc_stderr,none": 0.03398110890294636 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.39285714285714285, + "acc_stderr,none": 0.046355501356099754 + }, + "harness|winogrande|0": { + "acc,none": 0.6535122336227308, + "acc_stderr,none": 0.013373773411685651, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.6877104377104377, + "acc_stderr,none": 0.009509325983631444, + "acc_norm,none": 0.6224747474747475, + "acc_norm_stderr,none": 0.009947227833469428, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.8388379204892966, + "acc_stderr,none": 0.006430770316534764, + "alias": "boolq" + }, + "harness|hellaswag|0": { + "acc,none": 0.5849432383987253, + "acc_stderr,none": 0.004917248150601868, + "acc_norm,none": 0.7642899820752838, + "acc_norm_stderr,none": 0.004235743182042662, + "alias": "hellaswag" + }, + "harness|arc:challenge|0": { + "acc,none": 0.431740614334471, + "acc_stderr,none": 0.014474591427196204, + "acc_norm,none": 0.4462457337883959, + "acc_norm_stderr,none": 0.014526705548539982, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 5.570175233579023, + "perplexity_stderr,none": 0.18749563803186947, + "acc,none": 0.6111003299049098, + "acc_stderr,none": 0.0067918348844501425, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.33, + "acc_stderr,none": 0.021049612166134796, + "acc_norm,none": 0.43, + "acc_norm_stderr,none": 0.02216263442665284, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3525091799265606, + "acc_stderr,none": 0.01672464638075655, + "alias": "truthfulqa_mc1" + }, + 
"harness|piqa|0": { + "acc,none": 0.7480957562568009, + "acc_stderr,none": 0.010128421335088678, + "acc_norm,none": 0.7546245919477693, + "acc_norm_stderr,none": 0.010039831320422401, + "alias": "piqa" + } + }, + "task_info": { + "model": "Intel/Qwen1.5-7B-Chat-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 7, + "architectures": "QwenForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7, + "model_size": 4, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.002, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float16", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + 
"harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714489707.5071816, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/Qwen1.5-7B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-01-00-56-14.json b/Intel/results_2024-05-01-00-56-14.json new file mode 100644 index 0000000000000000000000000000000000000000..f6e4d9d867d8418f86dada6a85a0a0514a880304 --- /dev/null +++ b/Intel/results_2024-05-01-00-56-14.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + 
"start_time": null, + "end_time": "2024-05-01-00-56-14", + "total_evaluation_time_secondes": "", + "model_name": "Intel/Qwen1.5-0.5B-Chat-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 0.78, + "model_params": 0.31, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.36008763194582755, + "acc_stderr,none": 0.004790445139186363, + "acc_norm,none": 0.4422425811591316, + "acc_norm_stderr,none": 0.004956378590571537, + "alias": "hellaswag" + }, + "harness|mmlu|0": { + "acc,none": 0.2910554052129326, + "acc_stderr,none": 0.0037995259748246695, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.29861849096705634, + "acc_stderr,none": 0.006616169165331545 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.30158730158730157, + "acc_stderr,none": 0.04104947269903394 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.4303030303030303, + "acc_stderr,none": 0.038662259628790774 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.35784313725490197, + "acc_stderr,none": 0.033644872860882975 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.45569620253164556, + "acc_stderr,none": 0.032419206846933335 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.4297520661157025, + "acc_stderr,none": 0.04519082021319773 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.39814814814814814, + "acc_stderr,none": 0.047323326159788154 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.3374233128834356, + "acc_stderr,none": 0.03714908409935575 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.315028901734104, + "acc_stderr,none": 0.0250093137900697 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574885 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.2958199356913183, + "acc_stderr,none": 0.025922371788818788 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.32407407407407407, + "acc_stderr,none": 0.026041766202717156 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.25684485006518903, + "acc_stderr,none": 0.011158455853098838 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.30409356725146197, + "acc_stderr,none": 0.035282112582452306 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.3202446089475378, + "acc_stderr,none": 0.008268989007731709 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.2528301886792453, + "acc_stderr,none": 0.026749899771241235 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.2658959537572254, + "acc_stderr,none": 0.033687629322594316 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.17, + "acc_stderr,none": 0.0377525168068637 + }, + "harness|mmlu_human_aging|0": 
{ + "alias": " - human_aging", + "acc,none": 0.39461883408071746, + "acc_stderr,none": 0.03280400504755291 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.32038834951456313, + "acc_stderr,none": 0.046202840822800406 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.4358974358974359, + "acc_stderr,none": 0.03248577511578401 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.37037037037037035, + "acc_stderr,none": 0.01726860756000578 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.3562091503267974, + "acc_stderr,none": 0.027420477662629228 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.2198581560283688, + "acc_stderr,none": 0.024706141070705477 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.1875, + "acc_stderr,none": 0.023709788253811766 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.3253012048192771, + "acc_stderr,none": 0.03647168523683226 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.2934676633084173, + "acc_stderr,none": 0.00818128243976792 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.20175438596491227, + "acc_stderr,none": 0.037752050135836386 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.32323232323232326, + "acc_stderr,none": 0.03332299921070643 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.2538860103626943, + "acc_stderr,none": 0.03141024780565318 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.27692307692307694, + "acc_stderr,none": 0.022688042352424994 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.2689075630252101, + "acc_stderr,none": 0.02880139219363127 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.3174311926605505, + "acc_stderr,none": 0.0199571521984605 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.3893129770992366, + "acc_stderr,none": 0.04276486542814591 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.29248366013071897, + "acc_stderr,none": 0.018403415710109793 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.2909090909090909, + "acc_stderr,none": 0.04350271442923243 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.20816326530612245, + "acc_stderr,none": 0.025991117672813292 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.3383084577114428, + "acc_stderr,none": 0.03345563070339193 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.24865207738661593, + "acc_stderr,none": 0.0076626997718791485 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - 
abstract_algebra", + "acc,none": 0.24, + "acc_stderr,none": 0.04292346959909282 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.2962962962962963, + "acc_stderr,none": 0.03944624162501116 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.03583496176361064 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2986111111111111, + "acc_stderr,none": 0.03827052357950756 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.2, + "acc_stderr,none": 0.040201512610368445 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.2647058823529412, + "acc_stderr,none": 0.04389869956808778 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.26382978723404255, + "acc_stderr,none": 0.028809989854102973 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.2689655172413793, + "acc_stderr,none": 0.036951833116502325 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.18783068783068782, + "acc_stderr,none": 0.020115734141521104 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.27419354838709675, + "acc_stderr,none": 0.02537813997088521 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.1921182266009852, + "acc_stderr,none": 0.027719315709614778 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.22962962962962963, + "acc_stderr,none": 0.025644108639267624 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2119205298013245, + "acc_stderr,none": 0.03336767086567977 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.1527777777777778, + "acc_stderr,none": 0.024536326026134217 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3482142857142857, + "acc_stderr,none": 0.04521829902833585 + }, + "harness|arc:easy|0": { + "acc,none": 0.4696969696969697, + "acc_stderr,none": 0.010240923608726537, + "acc_norm,none": 0.41919191919191917, + "acc_norm_stderr,none": 0.01012490528249118, + "alias": "arc_easy" + }, + "harness|piqa|0": { + "acc,none": 0.6632208922742111, + "acc_stderr,none": 0.011026738925251179, + "acc_norm,none": 0.6594124047878128, + "acc_norm_stderr,none": 0.011057027540404739, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.42577722320751654, + "acc_stderr,none": 0.015077974583911373, + "alias": "truthfulqa_mc2" + }, + "harness|boolq|0": { + "acc,none": 0.4379204892966361, + "acc_stderr,none": 0.008677388652709263, + "alias": 
"boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.188, + "acc_stderr,none": 0.01749067888034626, + "acc_norm,none": 0.312, + "acc_norm_stderr,none": 0.020740596536488087, + "alias": "openbookqa" + }, + "harness|winogrande|0": { + "acc,none": 0.5430149960536701, + "acc_stderr,none": 0.01400038676159829, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2484700122399021, + "acc_stderr,none": 0.015127427096520677, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 30.634680468634887, + "perplexity_stderr,none": 1.491757631028465, + "acc,none": 0.4001552493692994, + "acc_stderr,none": 0.0068256774766065256, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.24658703071672355, + "acc_stderr,none": 0.012595726268790122, + "acc_norm,none": 0.29266211604095566, + "acc_norm_stderr,none": 0.013295916103619411, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "Intel/Qwen1.5-0.5B-Chat-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 1, + "architectures": "QwenForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 1, + "model_size": 0.5, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 200, + "lr": 0.005, + "minmax_lr": 0.005, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float16", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 
0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714494322.3458931, + "config": { + "model": "hf", + "model_args": 
"pretrained=Intel/Qwen1.5-0.5B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-01-02-20-44.json b/Intel/results_2024-05-01-02-20-44.json new file mode 100644 index 0000000000000000000000000000000000000000..bbeb29b4aee4a26db4e7e4f469d095421d15d3b9 --- /dev/null +++ b/Intel/results_2024-05-01-02-20-44.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-01-02-20-44", + "total_evaluation_time_secondes": "", + "model_name": "Intel/gemma-7b-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 7.18, + "model_params": 7.82, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|arc:easy|0": { + "acc,none": 0.8152356902356902, + "acc_stderr,none": 0.007963772171570793, + "acc_norm,none": 0.8085016835016835, + "acc_norm_stderr,none": 0.00807404447731971, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.8281345565749235, + "acc_stderr,none": 0.006598379269781497, + "alias": "boolq" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.42284386411413843, + "acc_stderr,none": 0.014468335415445921, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5017064846416383, + "acc_stderr,none": 0.014611305705056995, + "acc_norm,none": 0.5324232081911263, + "acc_norm_stderr,none": 0.014580637569995426, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.28886168910648713, + "acc_stderr,none": 0.015866346401384308, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.8057671381936888, + "acc_stderr,none": 0.009230209366168288, + "acc_norm,none": 0.8128400435255713, + "acc_norm_stderr,none": 0.009100273290473552, + "alias": "piqa" + }, + "harness|winogrande|0": { + "acc,none": 0.745067087608524, + "acc_stderr,none": 0.012248806969376422, + "alias": "winogrande" + }, + "harness|mmlu|0": { + "acc,none": 0.6110952855718559, + "acc_stderr,none": 0.003862144549291668, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5464399574920298, + "acc_stderr,none": 0.006685840827507319 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.42857142857142855, + "acc_stderr,none": 0.04426266681379909 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7151515151515152, + "acc_stderr,none": 0.035243908445117815 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7696078431372549, + "acc_stderr,none": 0.029554292605695063 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7890295358649789, + "acc_stderr,none": 0.02655837250266192 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7851239669421488, + "acc_stderr,none": 0.037494924487096966 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7592592592592593, + "acc_stderr,none": 0.041331194402438376 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.754601226993865, 
+ "acc_stderr,none": 0.03380939813943354 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6734104046242775, + "acc_stderr,none": 0.025248264774242836 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6913183279742765, + "acc_stderr,none": 0.02623696588115326 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7160493827160493, + "acc_stderr,none": 0.025089478523765134 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.46479791395045633, + "acc_stderr,none": 0.01273854737130395 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.847953216374269, + "acc_stderr,none": 0.02753912288906145 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6906984229159961, + "acc_stderr,none": 0.007976549912624318 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.690566037735849, + "acc_stderr,none": 0.028450154794118637 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5895953757225434, + "acc_stderr,none": 0.037507570448955356 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6905829596412556, + "acc_stderr,none": 0.031024411740572206 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8446601941747572, + "acc_stderr,none": 0.03586594738573974 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8974358974358975, + "acc_stderr,none": 0.019875655027867443 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8071519795657727, + "acc_stderr,none": 0.014108533515757433 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6993464052287581, + "acc_stderr,none": 0.026256053835718964 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.48226950354609927, + "acc_stderr,none": 0.02980873964223777 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6433823529411765, + "acc_stderr,none": 0.029097209568411955 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7062073448163796, + "acc_stderr,none": 0.008040998950033304 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.43859649122807015, + "acc_stderr,none": 0.04668000738510455 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.797979797979798, + "acc_stderr,none": 0.02860620428922989 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 
0.8238341968911918, + "acc_stderr,none": 0.027493504244548064 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.617948717948718, + "acc_stderr,none": 0.02463554916390823 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6512605042016807, + "acc_stderr,none": 0.030956636328566548 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8128440366972477, + "acc_stderr,none": 0.016722684526200154 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7099236641221374, + "acc_stderr,none": 0.03980066246467765 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6356209150326797, + "acc_stderr,none": 0.019469518221573695 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6454545454545455, + "acc_stderr,none": 0.04582004841505417 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7183673469387755, + "acc_stderr,none": 0.02879518557429127 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7611940298507462, + "acc_stderr,none": 0.030147775935409217 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.85, + "acc_stderr,none": 0.03588702812826371 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5363146209958769, + "acc_stderr,none": 0.00854104744342032 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6, + "acc_stderr,none": 0.04232073695151589 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6776315789473685, + "acc_stderr,none": 0.038035102483515854 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.03476590104304134 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.46, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.5, + "acc_stderr,none": 0.050251890762960605 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.45098039215686275, + "acc_stderr,none": 0.049512182523962604 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.6085106382978723, + "acc_stderr,none": 0.03190701242326812 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5655172413793104, + "acc_stderr,none": 0.04130740879555497 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.42592592592592593, + "acc_stderr,none": 0.025467149045469557 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7709677419354839, + "acc_stderr,none": 0.023904914311782655 + }, + 
"harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5517241379310345, + "acc_stderr,none": 0.03499113137676744 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.62, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.02874204090394849 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.36423841059602646, + "acc_stderr,none": 0.03929111781242742 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5462962962962963, + "acc_stderr,none": 0.03395322726375797 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.41964285714285715, + "acc_stderr,none": 0.04684099321077106 + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.391288578457952, + "perplexity_stderr,none": 0.06834212955306411, + "acc,none": 0.7252086163399961, + "acc_stderr,none": 0.006219351548299038, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.338, + "acc_stderr,none": 0.02117566569520941, + "acc_norm,none": 0.45, + "acc_norm_stderr,none": 0.022270877485360444, + "alias": "openbookqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.6032662816172077, + "acc_stderr,none": 0.004882200364432386, + "acc_norm,none": 0.799044015136427, + "acc_norm_stderr,none": 0.003998962580974678, + "alias": "hellaswag" + } + }, + "task_info": { + "model": "Intel/gemma-7b-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 7, + "architectures": "GemmaForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7, + "model_size": 4, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.002, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float16", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + 
"harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|hellaswag|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + 
"mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714496298.0373547, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/gemma-7b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-01-04-10-41.json b/Intel/results_2024-05-01-04-10-41.json new file mode 100644 index 0000000000000000000000000000000000000000..a48655fe12493853366511621601ce10ebfc7372 --- /dev/null +++ b/Intel/results_2024-05-01-04-10-41.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-01-04-10-41", + "total_evaluation_time_secondes": "", + "model_name": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.6, + "model_params": 10.57, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.7695343330702447, + "acc_stderr,none": 0.011835872164836671, + "alias": "winogrande" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.064920029771224, + "perplexity_stderr,none": 0.07016086763498619, + "acc,none": 0.7335532699398408, + "acc_stderr,none": 0.006159324694087315, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.356, + "acc_stderr,none": 0.02143471235607264, + "acc_norm,none": 0.46, + "acc_norm_stderr,none": 0.022311333245289666, + "alias": "openbookqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.8265993265993266, + "acc_stderr,none": 0.007768570412816704, + "acc_norm,none": 0.8211279461279462, + "acc_norm_stderr,none": 0.007864024474332735, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.6049488054607508, + "acc_stderr,none": 0.014285898292938162, + "acc_norm,none": 0.6313993174061433, + "acc_norm_stderr,none": 0.014097810678042194, + "alias": "arc_challenge" + }, + "harness|piqa|0": { + "acc,none": 0.8073993471164309, + "acc_stderr,none": 0.009200649707017568, + "acc_norm,none": 0.8079434167573449, + "acc_norm_stderr,none": 0.00919074029512647, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5605875152998776, + "acc_stderr,none": 0.017374520482513714, + "alias": "truthfulqa_mc1" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.7179802896446443, + "acc_stderr,none": 0.014981196795486781, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.6829316869149572, + "acc_stderr,none": 0.004643832742876648, + "acc_norm,none": 0.8527185819557856, + "acc_norm_stderr,none": 0.0035366196730199986, + "alias": "hellaswag" + }, + "harness|boolq|0": { + "acc,none": 0.882874617737003, + "acc_stderr,none": 0.005624288190378992, + "alias": "boolq" + }, + 
"harness|mmlu|0": { + "acc,none": 0.6242700470018516, + "acc_stderr,none": 0.0038176950123177803, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5685441020191286, + "acc_stderr,none": 0.006612407808418779 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4126984126984127, + "acc_stderr,none": 0.04403438954768177 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.8121212121212121, + "acc_stderr,none": 0.03050193405942914 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8235294117647058, + "acc_stderr,none": 0.026756401538078945 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8607594936708861, + "acc_stderr,none": 0.022535526352692712 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7933884297520661, + "acc_stderr,none": 0.03695980128098826 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7592592592592593, + "acc_stderr,none": 0.041331194402438376 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6993865030674846, + "acc_stderr,none": 0.03602511318806771 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7167630057803468, + "acc_stderr,none": 0.024257901705323374 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2547486033519553, + "acc_stderr,none": 0.014572650383409155 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.707395498392283, + "acc_stderr,none": 0.02583989833487798 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.75, + "acc_stderr,none": 0.02409347123262133 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.49022164276401564, + "acc_stderr,none": 0.012767793787729333 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.783625730994152, + "acc_stderr,none": 0.031581495393387324 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7058255551979401, + "acc_stderr,none": 0.007888326651888832 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7132075471698113, + "acc_stderr,none": 0.02783491252754407 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6358381502890174, + "acc_stderr,none": 0.03669072477416907 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6816143497757847, + "acc_stderr,none": 0.03126580522513713 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8155339805825242, + "acc_stderr,none": 0.03840423627288276 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8846153846153846, + "acc_stderr,none": 0.020930193185179333 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.71, + "acc_stderr,none": 0.04560480215720684 + }, + 
"harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.80970625798212, + "acc_stderr,none": 0.01403694585038138 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7189542483660131, + "acc_stderr,none": 0.025738854797818716 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5141843971631206, + "acc_stderr,none": 0.02981549448368206 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7058823529411765, + "acc_stderr,none": 0.027678468642144703 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.536144578313253, + "acc_stderr,none": 0.03882310850890594 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7299317517062074, + "acc_stderr,none": 0.007800110585101146 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.43859649122807015, + "acc_stderr,none": 0.04668000738510455 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8333333333333334, + "acc_stderr,none": 0.026552207828215296 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8860103626943006, + "acc_stderr,none": 0.022935144053919432 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.658974358974359, + "acc_stderr,none": 0.02403548967633508 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.680672268907563, + "acc_stderr,none": 0.030283995525884403 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8256880733944955, + "acc_stderr,none": 0.016265675632010347 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.732824427480916, + "acc_stderr,none": 0.038808483010823944 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6470588235294118, + "acc_stderr,none": 0.019333142020797164 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6636363636363637, + "acc_stderr,none": 0.04525393596302506 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7061224489795919, + "acc_stderr,none": 0.029162738410249765 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8208955223880597, + "acc_stderr,none": 0.027113286753111837 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.88, + "acc_stderr,none": 0.03265986323710905 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5239454487789407, + "acc_stderr,none": 0.008564264452351101 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6, + "acc_stderr,none": 0.04232073695151589 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.7236842105263158, + "acc_stderr,none": 0.03639057569952929 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7083333333333334, + "acc_stderr,none": 0.03800968060554858 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - 
college_chemistry", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.45, + "acc_stderr,none": 0.05000000000000001 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3627450980392157, + "acc_stderr,none": 0.04784060704105655 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5702127659574469, + "acc_stderr,none": 0.03236214467715563 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5586206896551724, + "acc_stderr,none": 0.04137931034482757 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.47619047619047616, + "acc_stderr,none": 0.025722097064388525 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7580645161290323, + "acc_stderr,none": 0.024362599693031114 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.47783251231527096, + "acc_stderr,none": 0.03514528562175007 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.64, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34814814814814815, + "acc_stderr,none": 0.02904560029061625 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.31125827814569534, + "acc_stderr,none": 0.03780445850526733 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 0.03407632093854052 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.45535714285714285, + "acc_stderr,none": 0.04726835553719099 + } + }, + "task_info": { + "model": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 10.7, + "architectures": "LlamaForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 10.7, + "model_size": 5.6, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 200, + "lr": 0.005, + "minmax_lr": 0.002, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float16", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": false + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|piqa|0": 1.0, + 
"harness|truthfulqa:mc1|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + 
"mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714501508.9874723, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-01-05-22-27.json b/Intel/results_2024-05-01-05-22-27.json new file mode 100644 index 0000000000000000000000000000000000000000..8d3f376098152612253dab0b082affba5c1a5237 --- /dev/null +++ b/Intel/results_2024-05-01-05-22-27.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-01-05-22-27", + "total_evaluation_time_secondes": "", + "model_name": "Intel/Phi-3-mini-4k-instruct-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 2.28, + "model_params": 3.66, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.7932535364526659, + "acc_stderr,none": 0.009448665514183262, + "acc_norm,none": 0.795429815016322, + "acc_norm_stderr,none": 0.009411688039193606, + "alias": "piqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5945030870344553, + "acc_stderr,none": 0.004899845087183108, + "acc_norm,none": 0.7794264090818562, + "acc_norm_stderr,none": 0.00413786037078583, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.8333333333333334, + "acc_stderr,none": 0.007647191129018641, + "acc_norm,none": 0.8085016835016835, + "acc_norm_stderr,none": 0.008074044477319723, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.8617737003058104, + "acc_stderr,none": 0.006036490185165252, + "alias": "boolq" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5708191126279863, + "acc_stderr,none": 0.014464085894870653, + "acc_norm,none": 0.5827645051194539, + "acc_norm_stderr,none": 0.014409825518403079, + "alias": "arc_challenge" + }, + "harness|mmlu|0": { + "acc,none": 0.666215638797892, + "acc_stderr,none": 0.003791130008412831, + "alias": "mmlu" + 
}, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.6374070138150904, + "acc_stderr,none": 0.006758438130469111 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.5317460317460317, + "acc_stderr,none": 0.04463112720677173 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.8121212121212121, + "acc_stderr,none": 0.03050193405942914 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.803921568627451, + "acc_stderr,none": 0.027865942286639325 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8185654008438819, + "acc_stderr,none": 0.02508596114457965 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8347107438016529, + "acc_stderr,none": 0.03390780612972776 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7407407407407407, + "acc_stderr,none": 0.042365112580946315 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.8159509202453987, + "acc_stderr,none": 0.030446777687971726 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7225433526011561, + "acc_stderr,none": 0.024105712607754307 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.5519553072625698, + "acc_stderr,none": 0.016631976628930595 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.7009646302250804, + "acc_stderr,none": 0.026003301117885135 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7746913580246914, + "acc_stderr,none": 0.02324620264781975 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.5052151238591917, + "acc_stderr,none": 0.01276954144965255 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8070175438596491, + "acc_stderr,none": 0.030267457554898465 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7022851625362085, + "acc_stderr,none": 0.0078979256605874 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7584905660377359, + "acc_stderr,none": 0.026341480371118352 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6416184971098265, + "acc_stderr,none": 0.03656343653353159 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.672645739910314, + "acc_stderr,none": 0.03149384670994131 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8155339805825242, + "acc_stderr,none": 0.03840423627288276 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8846153846153846, + "acc_stderr,none": 0.02093019318517933 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.71, + "acc_stderr,none": 0.04560480215720683 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8071519795657727, + "acc_stderr,none": 
0.014108533515757431 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6895424836601307, + "acc_stderr,none": 0.0264930332251459 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5, + "acc_stderr,none": 0.029827499313594685 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6911764705882353, + "acc_stderr,none": 0.028064998167040094 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333047 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7741306467338317, + "acc_stderr,none": 0.007389924316755524 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.5614035087719298, + "acc_stderr,none": 0.04668000738510455 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8383838383838383, + "acc_stderr,none": 0.026225919863629293 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8808290155440415, + "acc_stderr,none": 0.02338193534812144 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.7, + "acc_stderr,none": 0.02323458108842849 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7941176470588235, + "acc_stderr,none": 0.026265024608275882 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8807339449541285, + "acc_stderr,none": 0.01389572929258898 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.732824427480916, + "acc_stderr,none": 0.03880848301082396 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.704248366013072, + "acc_stderr,none": 0.018463154132632813 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6454545454545455, + "acc_stderr,none": 0.04582004841505417 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7714285714285715, + "acc_stderr,none": 0.026882144922307744 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.845771144278607, + "acc_stderr,none": 0.02553843336857833 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.03775251680686371 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5683476054551221, + "acc_stderr,none": 0.008412846957194594 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.37, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6518518518518519, + "acc_stderr,none": 0.041153246103369526 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.75, + "acc_stderr,none": 0.03523807393012047 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.03476590104304134 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.46, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - 
college_computer_science", + "acc,none": 0.46, + "acc_stderr,none": 0.05009082659620333 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117317 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.39215686274509803, + "acc_stderr,none": 0.04858083574266345 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.78, + "acc_stderr,none": 0.041633319989322605 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.6468085106382979, + "acc_stderr,none": 0.031245325202761923 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6068965517241379, + "acc_stderr,none": 0.040703290137070705 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.5158730158730159, + "acc_stderr,none": 0.025738330639412152 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.8096774193548387, + "acc_stderr,none": 0.02233170761182307 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5665024630541872, + "acc_stderr,none": 0.03486731727419873 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252607 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.32222222222222224, + "acc_stderr,none": 0.028493465091028597 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.4370860927152318, + "acc_stderr,none": 0.040500357222306355 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5694444444444444, + "acc_stderr,none": 0.03376922151252336 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5, + "acc_stderr,none": 0.04745789978762494 + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.57314569155882, + "acc_stderr,none": 0.015370888671817379, + "alias": "truthfulqa_mc2" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.232981066177356, + "perplexity_stderr,none": 0.1029519445833899, + "acc,none": 0.6813506695129051, + "acc_stderr,none": 0.006491632434663263, + "alias": "lambada_openai" + }, + "harness|winogrande|0": { + "acc,none": 0.7348066298342542, + "acc_stderr,none": 0.01240654946619286, + "alias": "winogrande" + }, + "harness|openbookqa|0": { + "acc,none": 0.386, + "acc_stderr,none": 0.02179352921928116, + "acc_norm,none": 0.468, + "acc_norm_stderr,none": 0.0223371864790443, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.386780905752754, + "acc_stderr,none": 0.017048857010515107, + "alias": "truthfulqa_mc1" + } + }, + "task_info": { + "model": "Intel/Phi-3-mini-4k-instruct-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 1, + "architectures": "Phi3ForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 1, + "model_size": 2.2, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { 
+ "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.001, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float32", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": false + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + 
"mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714509384.8957896, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/Phi-3-mini-4k-instruct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-06-18-48-05.json b/Intel/results_2024-05-06-18-48-05.json new file mode 100644 index 0000000000000000000000000000000000000000..2c2d5d7780972ad1850acfba5ed6ac74247ce9ca --- /dev/null +++ b/Intel/results_2024-05-06-18-48-05.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-06-18-48-05", + "total_evaluation_time_secondes": "", + "model_name": "Intel/Baichuan2-7B-Chat-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.43, + "model_params": 6.53, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 0.47472158177098, + "acc_stderr,none": 0.015422315616226206, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4104095563139932, + "acc_stderr,none": 0.014374922192642662, + "acc_norm,none": 0.42406143344709896, + "acc_norm_stderr,none": 0.014441889627464396, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.971916376229947, + "perplexity_stderr,none": 
0.1125823000420938, + "acc,none": 0.6751406947409276, + "acc_stderr,none": 0.006524644766835827, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.31946144430844553, + "acc_stderr,none": 0.016322644182960505, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.7825688073394496, + "acc_stderr,none": 0.007214641080602786, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.5061956986184304, + "acc_stderr,none": 0.004020003727804312, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.45993623804463335, + "acc_stderr,none": 0.006890141515530341 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3253968253968254, + "acc_stderr,none": 0.041905964388711366 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6727272727272727, + "acc_stderr,none": 0.03663974994391242 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.696078431372549, + "acc_stderr,none": 0.03228210387037894 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7088607594936709, + "acc_stderr,none": 0.029571601065753374 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.6198347107438017, + "acc_stderr,none": 0.04431324501968432 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6296296296296297, + "acc_stderr,none": 0.04668408033024931 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.588957055214724, + "acc_stderr,none": 0.038656978537853624 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5028901734104047, + "acc_stderr,none": 0.02691864538323901 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2446927374301676, + "acc_stderr,none": 0.014378169884098423 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6012861736334405, + "acc_stderr,none": 0.0278093225857745 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5679012345679012, + "acc_stderr,none": 0.02756301097160667 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.3741851368970013, + "acc_stderr,none": 0.012359335618172056 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7309941520467836, + "acc_stderr,none": 0.03401052620104089 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5902800128741551, + "acc_stderr,none": 0.00854032139746814 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.569811320754717, + "acc_stderr,none": 0.030471445867183235 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.48554913294797686, + "acc_stderr,none": 0.03810871630454764 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5650224215246636, + "acc_stderr,none": 0.033272833702713445 + }, + "harness|mmlu_management|0": { + 
"alias": " - management", + "acc,none": 0.6407766990291263, + "acc_stderr,none": 0.047504583990416946 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.0272360139461967 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.6, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7279693486590039, + "acc_stderr,none": 0.015913367447500517 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5816993464052288, + "acc_stderr,none": 0.02824513402438729 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.3829787234042553, + "acc_stderr,none": 0.02899908090480618 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.5220588235294118, + "acc_stderr,none": 0.030343264224213528 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4578313253012048, + "acc_stderr,none": 0.0387862677100236 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.5823854403639909, + "acc_stderr,none": 0.00859896000077255 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.24561403508771928, + "acc_stderr,none": 0.040493392977481404 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.03358618145732523 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7305699481865285, + "acc_stderr,none": 0.03201867122877793 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.44358974358974357, + "acc_stderr,none": 0.0251891498947642 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.4957983193277311, + "acc_stderr,none": 0.0324773433444811 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7009174311926606, + "acc_stderr,none": 0.019630417285415182 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6870229007633588, + "acc_stderr,none": 0.04066962905677697 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.477124183006536, + "acc_stderr,none": 0.020206653187884786 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6545454545454545, + "acc_stderr,none": 0.04554619617541054 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.5877551020408164, + "acc_stderr,none": 0.031512360446742674 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7213930348258707, + "acc_stderr,none": 0.031700561834973086 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.75, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.41801458928005075, + "acc_stderr,none": 0.008577192198635467 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621503 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 
0.4888888888888889, + "acc_stderr,none": 0.043182754919779756 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5460526315789473, + "acc_stderr,none": 0.04051646342874143 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5347222222222222, + "acc_stderr,none": 0.04171115858181618 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.52, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.33, + "acc_stderr,none": 0.04725815626252604 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.28431372549019607, + "acc_stderr,none": 0.04488482852329017 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.64, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.43829787234042555, + "acc_stderr,none": 0.03243618636108101 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.4068965517241379, + "acc_stderr,none": 0.04093793981266237 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.29365079365079366, + "acc_stderr,none": 0.023456037383982022 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.5806451612903226, + "acc_stderr,none": 0.02807158890109185 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4088669950738916, + "acc_stderr,none": 0.034590588158832314 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956913 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.24444444444444444, + "acc_stderr,none": 0.026202766534652148 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.33112582781456956, + "acc_stderr,none": 0.038425817186598696 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.46296296296296297, + "acc_stderr,none": 0.03400603625538272 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3392857142857143, + "acc_stderr,none": 0.04493949068613538 + }, + "harness|piqa|0": { + "acc,none": 0.735582154515778, + "acc_stderr,none": 0.010289787244767182, + "acc_norm,none": 0.735582154515778, + "acc_norm_stderr,none": 0.010289787244767175, + "alias": "piqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5289782911770564, + "acc_stderr,none": 0.004981394110706147, + "acc_norm,none": 0.7034455287791277, + "acc_norm_stderr,none": 0.004558049018764614, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.31, + "acc_stderr,none": 0.0207040410217248, + "acc_norm,none": 0.398, + "acc_norm_stderr,none": 0.021912377885779977, + "alias": "openbookqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.7201178451178452, + "acc_stderr,none": 0.00921207752465653, + "acc_norm,none": 0.6721380471380471, + "acc_norm_stderr,none": 0.009632587076170016, + "alias": "arc_easy" + }, + 
"harness|winogrande|0": { + "acc,none": 0.6858721389108129, + "acc_stderr,none": 0.013045416716072566, + "alias": "winogrande" + } + }, + "task_info": { + "model": "Intel/Baichuan2-7B-Chat-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 7, + "architectures": "BaiChuan2ForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7, + "model_size": 4, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.002, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float16", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + 
"harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|piqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714987804.8012214, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/Baichuan2-7B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-06-20-48-10.json b/Intel/results_2024-05-06-20-48-10.json new file mode 100644 index 0000000000000000000000000000000000000000..e8717e13a3285e29163741e34ee7910755f844d2 --- /dev/null +++ b/Intel/results_2024-05-06-20-48-10.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-06-20-48-10", + 
"total_evaluation_time_secondes": "", + "model_name": "Intel/Baichuan2-13B-Chat-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 9.14, + "model_params": 12.72, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|lambada:openai|0": { + "perplexity,none": 3.1866066378427735, + "perplexity_stderr,none": 0.08196592947860162, + "acc,none": 0.7131767902192897, + "acc_stderr,none": 0.006301120995354314, + "alias": "lambada_openai" + }, + "harness|winogrande|0": { + "acc,none": 0.7300710339384373, + "acc_stderr,none": 0.012476433372002596, + "alias": "winogrande" + }, + "harness|boolq|0": { + "acc,none": 0.8211009174311926, + "acc_stderr,none": 0.006703395833491562, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.7584330794341676, + "acc_stderr,none": 0.009986718001804486, + "acc_norm,none": 0.7589771490750816, + "acc_norm_stderr,none": 0.009979042717267312, + "alias": "piqa" + }, + "harness|mmlu|0": { + "acc,none": 0.557826520438684, + "acc_stderr,none": 0.003961001443358886, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5115834218916047, + "acc_stderr,none": 0.0068344977913292115 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.42063492063492064, + "acc_stderr,none": 0.04415438226743743 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7333333333333333, + "acc_stderr,none": 0.03453131801885417 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7941176470588235, + "acc_stderr,none": 0.028379449451588663 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7341772151898734, + "acc_stderr,none": 0.028756799629658335 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.6859504132231405, + "acc_stderr,none": 0.04236964753041018 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6851851851851852, + "acc_stderr,none": 0.04489931073591312 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6993865030674846, + "acc_stderr,none": 0.0360251131880677 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6127167630057804, + "acc_stderr,none": 0.026226158605124655 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.24692737430167597, + "acc_stderr,none": 0.014422292204808838 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6237942122186495, + "acc_stderr,none": 0.027513925683549427 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6296296296296297, + "acc_stderr,none": 0.026869490744815247 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4335071707953064, + "acc_stderr,none": 0.012656810383983972 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7602339181286549, + "acc_stderr,none": 0.03274485211946956 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.638236240746701, + "acc_stderr,none": 0.008303983171879983 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - 
clinical_knowledge", + "acc,none": 0.6, + "acc_stderr,none": 0.03015113445777629 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5375722543352601, + "acc_stderr,none": 0.0380168510452446 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6278026905829597, + "acc_stderr,none": 0.032443052830087304 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7766990291262136, + "acc_stderr,none": 0.04123553189891431 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8247863247863247, + "acc_stderr,none": 0.02490443909891822 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7854406130268199, + "acc_stderr,none": 0.014680033956893346 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6078431372549019, + "acc_stderr,none": 0.027956046165424516 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4432624113475177, + "acc_stderr,none": 0.029634838473766006 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.5661764705882353, + "acc_stderr,none": 0.03010563657001663 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.46987951807228917, + "acc_stderr,none": 0.03885425420866767 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6441338966525837, + "acc_stderr,none": 0.008375137422879046 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.35964912280701755, + "acc_stderr,none": 0.04514496132873632 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7121212121212122, + "acc_stderr,none": 0.03225883512300992 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7979274611398963, + "acc_stderr,none": 0.02897908979429673 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5282051282051282, + "acc_stderr,none": 0.0253106392549339 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.5252100840336135, + "acc_stderr,none": 0.0324371805513741 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7614678899082569, + "acc_stderr,none": 0.018272575810231867 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6946564885496184, + "acc_stderr,none": 0.04039314978724562 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5604575163398693, + "acc_stderr,none": 0.02007942040808792 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5727272727272728, + "acc_stderr,none": 0.04738198703545483 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6612244897959184, + "acc_stderr,none": 0.030299506562154188 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 
0.7761194029850746, + "acc_stderr,none": 0.029475250236017193 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.85, + "acc_stderr,none": 0.0358870281282637 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.46336822074215034, + "acc_stderr,none": 0.008568121791309898 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5259259259259259, + "acc_stderr,none": 0.04313531696750575 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6052631578947368, + "acc_stderr,none": 0.039777499346220734 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6388888888888888, + "acc_stderr,none": 0.04016660030451232 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.49, + "acc_stderr,none": 0.05024183937956911 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466996 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.04560480215720685 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.4851063829787234, + "acc_stderr,none": 0.032671518489247764 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.503448275862069, + "acc_stderr,none": 0.04166567577101579 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.37037037037037035, + "acc_stderr,none": 0.02487081525105711 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7096774193548387, + "acc_stderr,none": 0.02582210611941589 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.43842364532019706, + "acc_stderr,none": 0.03491207857486519 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.26666666666666666, + "acc_stderr,none": 0.026962424325073824 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3708609271523179, + "acc_stderr,none": 0.03943966699183629 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.38425925925925924, + "acc_stderr,none": 0.03317354514310742 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3392857142857143, + "acc_stderr,none": 0.04493949068613539 + }, + "harness|arc:easy|0": { + "acc,none": 0.7491582491582491, + "acc_stderr,none": 0.008895183010487391, + "acc_norm,none": 0.7041245791245792, + "acc_norm_stderr,none": 0.00936585413414007, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4735494880546075, + 
"acc_stderr,none": 0.014590931358120163, + "acc_norm,none": 0.4718430034129693, + "acc_norm_stderr,none": 0.014588204105102203, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5149555773005338, + "acc_stderr,none": 0.01579541827157361, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3635250917992656, + "acc_stderr,none": 0.016838862883965834, + "alias": "truthfulqa_mc1" + }, + "harness|hellaswag|0": { + "acc,none": 0.5670185222067318, + "acc_stderr,none": 0.004944755230598397, + "acc_norm,none": 0.7461661023700458, + "acc_norm_stderr,none": 0.004343142545094187, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.312, + "acc_stderr,none": 0.02074059653648807, + "acc_norm,none": 0.43, + "acc_norm_stderr,none": 0.02216263442665284, + "alias": "openbookqa" + } + }, + "task_info": { + "model": "Intel/Baichuan2-13B-Chat-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 13, + "architectures": "BaiChuanForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 13, + "model_size": 7.3, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-23T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.1", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.002, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float16", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|lambada:openai|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + 
"harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714992606.0254002, + "config": { + "model": "hf", + "model_args": 
"pretrained=Intel/Baichuan2-13B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-09-05-59-04.json b/Intel/results_2024-05-09-05-59-04.json new file mode 100644 index 0000000000000000000000000000000000000000..90e9e9201018948188c96d8cdc320989b8f50ded --- /dev/null +++ b/Intel/results_2024-05-09-05-59-04.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-09-05-59-04", + "total_evaluation_time_secondes": "", + "model_name": "Intel/Llama-2-7b-chat-hf-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 3.5, + "model_params": 7.0, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|arc:easy|0": { + "acc,none": 0.7335858585858586, + "acc_stderr,none": 0.009071357971078681, + "acc_norm,none": 0.6982323232323232, + "acc_norm_stderr,none": 0.009418994158522532, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.46442705279789154, + "acc_stderr,none": 0.015722742603615374, + "alias": "truthfulqa_mc2" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.330271144590987, + "perplexity_stderr,none": 0.08880199591810012, + "acc,none": 0.7017271492334562, + "acc_stderr,none": 0.006373868144287134, + "alias": "lambada_openai" + }, + "harness|hellaswag|0": { + "acc,none": 0.5699063931487751, + "acc_stderr,none": 0.004940771559475488, + "acc_norm,none": 0.7482573192591118, + "acc_norm_stderr,none": 0.004331271717773906, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.31456548347613217, + "acc_stderr,none": 0.01625524199317917, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.766050054406964, + "acc_stderr,none": 0.00987723689513747, + "acc_norm,none": 0.764417845484222, + "acc_norm_stderr,none": 0.009901067586473909, + "alias": "piqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4402730375426621, + "acc_stderr,none": 0.014506769524804246, + "acc_norm,none": 0.43686006825938567, + "acc_norm_stderr,none": 0.014494421584256525, + "alias": "arc_challenge" + }, + "harness|winogrande|0": { + "acc,none": 0.6582478295185478, + "acc_stderr,none": 0.013330103018622847, + "alias": "winogrande" + }, + "harness|mmlu|0": { + "acc,none": 0.4620424440962826, + "acc_stderr,none": 0.004045238527354896, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.42848034006376196, + "acc_stderr,none": 0.0069328072054616755 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.25396825396825395, + "acc_stderr,none": 0.03893259610604674 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.5636363636363636, + "acc_stderr,none": 0.03872592983524753 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.6519607843137255, + "acc_stderr,none": 0.03343311240488419 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.620253164556962, + "acc_stderr,none": 0.031591887529658504 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + 
"acc,none": 0.6033057851239669, + "acc_stderr,none": 0.04465869780531009 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.5555555555555556, + "acc_stderr,none": 0.04803752235190192 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.5705521472392638, + "acc_stderr,none": 0.038890666191127216 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5057803468208093, + "acc_stderr,none": 0.026917296179149123 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5144694533762058, + "acc_stderr,none": 0.02838619808417768 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5277777777777778, + "acc_stderr,none": 0.027777777777777804 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.35658409387222945, + "acc_stderr,none": 0.012233642989273891 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.672514619883041, + "acc_stderr,none": 0.035993357714560276 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5494045703250724, + "acc_stderr,none": 0.008696781011633008 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956913 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.539622641509434, + "acc_stderr,none": 0.030676096599389184 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.3988439306358382, + "acc_stderr,none": 0.03733626655383509 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5964125560538116, + "acc_stderr,none": 0.03292802819330314 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6407766990291263, + "acc_stderr,none": 0.04750458399041696 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7307692307692307, + "acc_stderr,none": 0.02905858830374884 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956911 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.6717752234993615, + "acc_stderr,none": 0.016791685640192892 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5261437908496732, + "acc_stderr,none": 0.028590752958852387 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.36879432624113473, + "acc_stderr,none": 0.028782227561347254 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.4227941176470588, + "acc_stderr,none": 0.030008562845003476 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4578313253012048, + "acc_stderr,none": 0.0387862677100236 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.5271368215794605, + "acc_stderr,none": 0.008780899524181778 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2894736842105263, + 
"acc_stderr,none": 0.04266339443159394 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.5757575757575758, + "acc_stderr,none": 0.035212249088415845 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.6683937823834197, + "acc_stderr,none": 0.03397636541089118 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.4230769230769231, + "acc_stderr,none": 0.02504919787604234 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.37815126050420167, + "acc_stderr,none": 0.031499305777849054 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.6165137614678899, + "acc_stderr,none": 0.02084715664191598 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.5572519083969466, + "acc_stderr,none": 0.04356447202665069 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.4444444444444444, + "acc_stderr,none": 0.02010258389588718 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5727272727272728, + "acc_stderr,none": 0.04738198703545483 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.5387755102040817, + "acc_stderr,none": 0.031912820526692774 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7263681592039801, + "acc_stderr,none": 0.03152439186555402 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.69, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.362511893434824, + "acc_stderr,none": 0.008406035153052648 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.48148148148148145, + "acc_stderr,none": 0.043163785995113245 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.46710526315789475, + "acc_stderr,none": 0.040601270352363966 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.4583333333333333, + "acc_stderr,none": 0.04166666666666665 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.24, + "acc_stderr,none": 0.04292346959909283 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.29, + "acc_stderr,none": 0.04560480215720684 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.18627450980392157, + "acc_stderr,none": 0.038739587141493545 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.39574468085106385, + "acc_stderr,none": 0.03196758697835362 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.45517241379310347, + "acc_stderr,none": 
0.04149886942192117 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2962962962962963, + "acc_stderr,none": 0.02351729433596329 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.5064516129032258, + "acc_stderr,none": 0.028441638233540505 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.3103448275862069, + "acc_stderr,none": 0.032550867699701024 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2962962962962963, + "acc_stderr,none": 0.027840811495871927 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.271523178807947, + "acc_stderr,none": 0.036313298039696525 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.2638888888888889, + "acc_stderr,none": 0.03005820270430985 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.39285714285714285, + "acc_stderr,none": 0.04635550135609976 + }, + "harness|boolq|0": { + "acc,none": 0.8064220183486238, + "acc_stderr,none": 0.006910376454601405, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.334, + "acc_stderr,none": 0.021113492347743727, + "acc_norm,none": 0.438, + "acc_norm_stderr,none": 0.022210326363977417, + "alias": "openbookqa" + } + }, + "task_info": { + "model": "Intel/Llama-2-7b-chat-hf-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 7.0, + "architectures": "LlamaForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7.0, + "model_size": 3.5, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-08T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.2.0.dev", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "enable_quanted_input": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 200, + "lr": 0.005, + "minmax_lr": 0.005, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "float16", + "static_groups": false, + "sym": false, + "true_sequential": false + }, + "versions": { + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + 
"harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + 
"mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715201004.6439905, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/Llama-2-7b-chat-hf-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-10-10-37-29.json b/Intel/results_2024-05-10-10-37-29.json new file mode 100644 index 0000000000000000000000000000000000000000..1962199e0f9459c432e08534e742c3f021feb2ad --- /dev/null +++ b/Intel/results_2024-05-10-10-37-29.json @@ -0,0 +1,596 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-10-10-37-29", + "total_evaluation_time_secondes": "", + "model_name": "Intel/Meta-Llama-3-8B-Instruct-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.4, + "model_params": 7.2, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5146934010760681, + "acc_stderr,none": 0.0152582547538094, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.570902210714997, + "acc_stderr,none": 0.0049393581455613, + "acc_norm,none": 0.7518422624975104, + "acc_norm_stderr,none": 0.004310610616845716, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.811026936026936, + "acc_stderr,none": 0.00803314829980193, + "acc_norm,none": 0.789983164983165, + "acc_norm_stderr,none": 0.008358034622322224, + "alias": "arc_easy" + }, + "harness|mmlu|0": { + "acc,none": 0.6241276171485544, + "acc_stderr,none": 0.00383558025213648, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5628055260361318, + "acc_stderr,none": 0.006689295581999755 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.49206349206349204, + "acc_stderr,none": 0.044715725362943486 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7151515151515152, + "acc_stderr,none": 0.03524390844511781 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8137254901960784, + "acc_stderr,none": 0.027325470966716333 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.810126582278481, + "acc_stderr,none": 0.02553010046023351 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.743801652892562, + "acc_stderr,none": 0.03984979653302872 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.8055555555555556, + "acc_stderr,none": 
0.038260763248848646 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7791411042944786, + "acc_stderr,none": 0.032591773927421776 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7023121387283237, + "acc_stderr,none": 0.024617055388677006 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.26256983240223464, + "acc_stderr,none": 0.014716824273017765 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6913183279742765, + "acc_stderr,none": 0.026236965881153256 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7376543209876543, + "acc_stderr,none": 0.024477222856135114 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4830508474576271, + "acc_stderr,none": 0.012762896889210867 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.03188578017686398 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7093659478596717, + "acc_stderr,none": 0.007876660614156239 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252607 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7056603773584905, + "acc_stderr,none": 0.02804918631569524 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.653179190751445, + "acc_stderr,none": 0.03629146670159663 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6860986547085202, + "acc_stderr,none": 0.031146796482972465 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8252427184466019, + "acc_stderr,none": 0.037601780060266196 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8931623931623932, + "acc_stderr,none": 0.020237149008990922 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.79, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8033205619412516, + "acc_stderr,none": 0.014214138556913912 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7189542483660131, + "acc_stderr,none": 0.025738854797818723 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5319148936170213, + "acc_stderr,none": 0.029766675075873866 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7022058823529411, + "acc_stderr,none": 0.027778298701545443 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4819277108433735, + "acc_stderr,none": 0.03889951252827216 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7331816704582386, + "acc_stderr,none": 0.007832357132624299 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.49122807017543857, + "acc_stderr,none": 0.04702880432049615 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7676767676767676, + "acc_stderr,none": 
0.03008862949021749 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8652849740932642, + "acc_stderr,none": 0.024639789097709437 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6435897435897436, + "acc_stderr,none": 0.024283140529467305 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7100840336134454, + "acc_stderr,none": 0.029472485833136094 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8128440366972477, + "acc_stderr,none": 0.016722684526200165 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7709923664122137, + "acc_stderr,none": 0.036853466317118506 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6699346405228758, + "acc_stderr,none": 0.019023726160724553 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6818181818181818, + "acc_stderr,none": 0.044612721759105085 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7306122448979592, + "acc_stderr,none": 0.02840125202902294 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8407960199004975, + "acc_stderr,none": 0.02587064676616914 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.84, + "acc_stderr,none": 0.03684529491774709 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5252140818268316, + "acc_stderr,none": 0.008572192732014554 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847415 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6222222222222222, + "acc_stderr,none": 0.04188307537595853 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6842105263157895, + "acc_stderr,none": 0.0378272898086547 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7291666666666666, + "acc_stderr,none": 0.037161774375660164 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.47, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.46078431372549017, + "acc_stderr,none": 0.04959859966384181 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.75, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5404255319148936, + "acc_stderr,none": 0.03257901482099834 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6206896551724138, + "acc_stderr,none": 0.040434618619167466 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4497354497354497, + "acc_stderr,none": 0.02562085704293665 + 
}, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7419354838709677, + "acc_stderr,none": 0.024892469172462836 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.45320197044334976, + "acc_stderr,none": 0.03502544650845872 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3592592592592593, + "acc_stderr,none": 0.029252905927251976 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.4370860927152318, + "acc_stderr,none": 0.04050035722230636 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.46296296296296297, + "acc_stderr,none": 0.03400603625538272 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.48214285714285715, + "acc_stderr,none": 0.047427623612430116 + }, + "harness|piqa|0": { + "acc,none": 0.7861806311207835, + "acc_stderr,none": 0.009565994206915594, + "acc_norm,none": 0.7856365614798694, + "acc_norm_stderr,none": 0.009574842136050933, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.8321100917431192, + "acc_stderr,none": 0.006537252053566842, + "alias": "boolq" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3635250917992656, + "acc_stderr,none": 0.016838862883965838, + "alias": "truthfulqa_mc1" + }, + "harness|winogrande|0": { + "acc,none": 0.7213891081294396, + "acc_stderr,none": 0.01259989664949388, + "alias": "winogrande" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.24000265267783, + "perplexity_stderr,none": 0.0824803761631906, + "acc,none": 0.7131767902192897, + "acc_stderr,none": 0.006301120995354307, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5187713310580204, + "acc_stderr,none": 0.014601090150633964, + "acc_norm,none": 0.5699658703071673, + "acc_norm_stderr,none": 0.014467631559137994, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.352, + "acc_stderr,none": 0.021380042385946048, + "acc_norm,none": 0.428, + "acc_norm_stderr,none": 0.022149790663861926, + "alias": "openbookqa" + } + }, + "task_info": { + "model": "Intel/Meta-Llama-3-8B-Instruct-int4-inc", + "local": true, + "revision": "main", + "private": false, + "params": 7.2, + "architectures": "LlamaForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7.2, + "model_size": 5.4, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Finished", + "submitted_time": "2024-05-08T15:44:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.2.0.dev", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "enable_quanted_input": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 200, + "lr": 0.005, + "minmax_lr": 0.005, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "float16", + "static_groups": false, + "sym": false, + "true_sequential": false + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + 
"harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + 
"mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715304126.9653401, + "config": { + "model": "hf", + "model_args": "pretrained=Intel/Meta-Llama-3-8B-Instruct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-18-13-12-26.json b/Intel/results_2024-05-18-13-12-26.json new file mode 100644 index 0000000000000000000000000000000000000000..7eedb4f4d391542bd490f07379aa86fca19d818a --- /dev/null +++ b/Intel/results_2024-05-18-13-12-26.json @@ -0,0 +1,595 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-18-13-12-26", + "total_evaluation_time_secondes": "", + "model_name": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.3", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.6, + "model_params": 10.7, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.7584846093133386, + "acc_stderr,none": 0.01202898378201188, + "alias": "winogrande" + }, + "harness|arc:challenge|0": { + "acc,none": 0.6040955631399317, + "acc_stderr,none": 0.014291228393536592, + "acc_norm,none": 0.6305460750853242, + "acc_norm_stderr,none": 0.014104578366491887, + "alias": "arc_challenge" + }, + "harness|mmlu|0": { + "acc,none": 0.6271186440677966, + "acc_stderr,none": 0.00382640388007823, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5732199787460148, + "acc_stderr,none": 0.006665399518756252 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3888888888888889, + "acc_stderr,none": 0.04360314860077459 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7757575757575758, + "acc_stderr,none": 0.032568666616811015 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - 
high_school_us_history", + "acc,none": 0.8382352941176471, + "acc_stderr,none": 0.025845017986926924 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8396624472573839, + "acc_stderr,none": 0.023884380925965672 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8099173553719008, + "acc_stderr,none": 0.035817969517092825 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7592592592592593, + "acc_stderr,none": 0.041331194402438376 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7300613496932515, + "acc_stderr,none": 0.0348782516849789 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7254335260115607, + "acc_stderr,none": 0.02402774515526501 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2927374301675978, + "acc_stderr,none": 0.01521810954441019 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.7041800643086816, + "acc_stderr,none": 0.025922371788818788 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7376543209876543, + "acc_stderr,none": 0.024477222856135114 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.48435462842242505, + "acc_stderr,none": 0.012763982838120962 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8011695906432749, + "acc_stderr,none": 0.03061111655743253 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7048599935629224, + "acc_stderr,none": 0.007909488801050074 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.62, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7132075471698113, + "acc_stderr,none": 0.02783491252754407 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6242774566473989, + "acc_stderr,none": 0.036928207672648664 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6681614349775785, + "acc_stderr,none": 0.03160295143776679 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8155339805825242, + "acc_stderr,none": 0.03840423627288276 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8717948717948718, + "acc_stderr,none": 0.02190190511507333 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.76, + "acc_stderr,none": 0.042923469599092816 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8007662835249042, + "acc_stderr,none": 0.014283378044296413 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7450980392156863, + "acc_stderr,none": 0.02495418432487991 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4858156028368794, + "acc_stderr,none": 0.02981549448368206 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7205882352941176, + "acc_stderr,none": 0.027257202606114948 + }, + "harness|mmlu_virology|0": { + 
"alias": " - virology", + "acc,none": 0.536144578313253, + "acc_stderr,none": 0.03882310850890593 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7367565810854728, + "acc_stderr,none": 0.007759497044594671 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.5, + "acc_stderr,none": 0.047036043419179864 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8282828282828283, + "acc_stderr,none": 0.026869716187429917 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8808290155440415, + "acc_stderr,none": 0.023381935348121448 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.658974358974359, + "acc_stderr,none": 0.02403548967633508 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6932773109243697, + "acc_stderr,none": 0.029953823891887037 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8293577981651377, + "acc_stderr,none": 0.016129271025099878 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7404580152671756, + "acc_stderr,none": 0.03844876139785271 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6503267973856209, + "acc_stderr,none": 0.01929196189506638 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.7, + "acc_stderr,none": 0.04389311454644287 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7061224489795919, + "acc_stderr,none": 0.02916273841024977 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8308457711442786, + "acc_stderr,none": 0.02650859065623324 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.9, + "acc_stderr,none": 0.030151134457776348 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5239454487789407, + "acc_stderr,none": 0.00856605560521422 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.37, + "acc_stderr,none": 0.04852365870939098 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5925925925925926, + "acc_stderr,none": 0.04244633238353228 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6973684210526315, + "acc_stderr,none": 0.03738520676119667 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7291666666666666, + "acc_stderr,none": 0.037161774375660164 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.45, + "acc_stderr,none": 0.05 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.49, + "acc_stderr,none": 0.05024183937956911 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.29, + "acc_stderr,none": 0.04560480215720683 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466996 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, 
+ "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5446808510638298, + "acc_stderr,none": 0.03255525359340355 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5448275862068965, + "acc_stderr,none": 0.04149886942192118 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.47883597883597884, + "acc_stderr,none": 0.025728230952130716 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7612903225806451, + "acc_stderr,none": 0.024251071262208837 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.458128078817734, + "acc_stderr,none": 0.03505630140785741 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.65, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.35555555555555557, + "acc_stderr,none": 0.029185714949857416 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2980132450331126, + "acc_stderr,none": 0.03734535676787198 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5416666666666666, + "acc_stderr,none": 0.03398110890294636 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.44642857142857145, + "acc_stderr,none": 0.04718471485219588 + }, + "harness|arc:easy|0": { + "acc,none": 0.8354377104377104, + "acc_stderr,none": 0.007608348390282838, + "acc_norm,none": 0.8202861952861953, + "acc_norm_stderr,none": 0.007878465068489264, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.8862385321100917, + "acc_stderr,none": 0.005553483010095675, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.8128400435255713, + "acc_stderr,none": 0.00910027329047355, + "acc_norm,none": 0.8122959738846572, + "acc_norm_stderr,none": 0.009110440292132567, + "alias": "piqa" + }, + "harness|openbookqa|0": { + "acc,none": 0.382, + "acc_stderr,none": 0.021750820591250834, + "acc_norm,none": 0.482, + "acc_norm_stderr,none": 0.02236856511738799, + "alias": "openbookqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.6845249950209121, + "acc_stderr,none": 0.004637550478007366, + "acc_norm,none": 0.8606851224855606, + "acc_norm_stderr,none": 0.003455671196993104, + "alias": "hellaswag" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.024395600515001, + "perplexity_stderr,none": 0.06930475108030439, + "acc,none": 0.7351057636328352, + "acc_stderr,none": 0.006147849695828245, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.7129938777873611, + "acc_stderr,none": 0.014994700631570199, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5691554467564259, + "acc_stderr,none": 0.017335272475332366, + "alias": "truthfulqa_mc1" + } + }, + "task_info": { + "model": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.3", + "revision": "main", + "private": false, + "params": 10.7, + "architectures": "LlamaForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 10.7, + "model_size": 5.6, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-11T11:55:16Z", + 
"model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.2.0.dev", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "enable_quanted_input": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 1000, + "lr": 0.001, + "minmax_lr": 0.01, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "float16", + "static_groups": false, + "sym": false, + "true_sequential": false + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0 + }, + "n-shot": 
{ + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715999695.543391, + "config": { + "model": "hf", + "model_args": "pretrained=/dataset/SOLAR-10.7B-Instruct-v1.0-samples1024-1w-2-tbs-16_iter1000,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Intel/results_2024-05-18-15-50-33.json b/Intel/results_2024-05-18-15-50-33.json new file mode 100644 index 0000000000000000000000000000000000000000..66a8c48b2136baa0552fa01c7833c9f6449453d8 --- /dev/null +++ b/Intel/results_2024-05-18-15-50-33.json @@ -0,0 +1,595 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-18-15-50-33", + "total_evaluation_time_secondes": "", + "model_name": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.2", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.6, + "model_params": 10.7, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|boolq|0": { + "acc,none": 0.8871559633027523, + "acc_stderr,none": 0.00553390672214442, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.37, + "acc_stderr,none": 0.02161328916516578, + "acc_norm,none": 0.47, + "acc_norm_stderr,none": 0.022342748192502843, + "alias": "openbookqa" + }, + 
"harness|lambada:openai|0": { + "perplexity,none": 3.09900146016817, + "perplexity_stderr,none": 0.07101046844025238, + "acc,none": 0.7281195420143606, + "acc_stderr,none": 0.006198725615232723, + "alias": "lambada_openai" + }, + "harness|mmlu|0": { + "acc,none": 0.6329582680529839, + "acc_stderr,none": 0.003826857804473612, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5806588735387885, + "acc_stderr,none": 0.006695269750055442 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4126984126984127, + "acc_stderr,none": 0.04403438954768176 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7818181818181819, + "acc_stderr,none": 0.03225078108306289 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8284313725490197, + "acc_stderr,none": 0.02646056956124066 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8607594936708861, + "acc_stderr,none": 0.022535526352692712 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8016528925619835, + "acc_stderr,none": 0.03640118271990947 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7870370370370371, + "acc_stderr,none": 0.0395783547198098 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7239263803680982, + "acc_stderr,none": 0.035123852837050495 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7196531791907514, + "acc_stderr,none": 0.024182427496577612 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.329608938547486, + "acc_stderr,none": 0.015721531075183866 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.707395498392283, + "acc_stderr,none": 0.02583989833487798 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7623456790123457, + "acc_stderr,none": 0.02368359183700855 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4791395045632334, + "acc_stderr,none": 0.012759117066518012 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7719298245614035, + "acc_stderr,none": 0.032180937956023566 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7077566784679755, + "acc_stderr,none": 0.007899220244504642 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252609 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6943396226415094, + "acc_stderr,none": 0.028353298073322663 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6416184971098265, + "acc_stderr,none": 0.03656343653353159 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145632 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6816143497757847, + "acc_stderr,none": 0.03126580522513713 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8058252427184466, + "acc_stderr,none": 0.03916667762822584 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + 
"acc,none": 0.8547008547008547, + "acc_stderr,none": 0.02308663508684141 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.74, + "acc_stderr,none": 0.04408440022768078 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8148148148148148, + "acc_stderr,none": 0.013890862162876166 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7091503267973857, + "acc_stderr,none": 0.02600480036395213 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5070921985815603, + "acc_stderr,none": 0.02982449855912901 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7389705882352942, + "acc_stderr,none": 0.026679252270103124 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5481927710843374, + "acc_stderr,none": 0.03874371556587953 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7413064673383165, + "acc_stderr,none": 0.0077164131254551915 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.45614035087719296, + "acc_stderr,none": 0.046854730419077895 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8181818181818182, + "acc_stderr,none": 0.02747960301053878 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8860103626943006, + "acc_stderr,none": 0.022935144053919426 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.023901157979402534 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6932773109243697, + "acc_stderr,none": 0.02995382389188703 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8348623853211009, + "acc_stderr,none": 0.015919557829976064 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7480916030534351, + "acc_stderr,none": 0.03807387116306086 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6715686274509803, + "acc_stderr,none": 0.018999707383162662 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.7090909090909091, + "acc_stderr,none": 0.04350271442923243 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7183673469387755, + "acc_stderr,none": 0.028795185574291286 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8159203980099502, + "acc_stderr,none": 0.02740385941078683 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.89, + "acc_stderr,none": 0.031446603773522014 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.531557247066286, + "acc_stderr,none": 0.008564466581764778 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6074074074074074, + "acc_stderr,none": 0.04218506215368879 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.7039473684210527, + 
"acc_stderr,none": 0.037150621549989056 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7708333333333334, + "acc_stderr,none": 0.035146974678623884 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.42, + "acc_stderr,none": 0.04960449637488584 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.37254901960784315, + "acc_stderr,none": 0.04810840148082633 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5914893617021276, + "acc_stderr,none": 0.032134180267015755 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5655172413793104, + "acc_stderr,none": 0.04130740879555498 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.46825396825396826, + "acc_stderr,none": 0.025699352832131792 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7580645161290323, + "acc_stderr,none": 0.024362599693031103 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4729064039408867, + "acc_stderr,none": 0.03512819077876106 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.65, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3814814814814815, + "acc_stderr,none": 0.02961671892749758 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3443708609271523, + "acc_stderr,none": 0.03879687024073327 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5138888888888888, + "acc_stderr,none": 0.03408655867977749 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.48214285714285715, + "acc_stderr,none": 0.047427623612430116 + }, + "harness|hellaswag|0": { + "acc,none": 0.6846245767775344, + "acc_stderr,none": 0.004637155743563884, + "acc_norm,none": 0.8569010157339175, + "acc_norm_stderr,none": 0.003494581076398528, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.7569060773480663, + "acc_stderr,none": 0.012055665630431043, + "alias": "winogrande" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5998293515358362, + "acc_stderr,none": 0.01431719778780917, + "acc_norm,none": 0.6228668941979523, + "acc_norm_stderr,none": 0.014163366896192598, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5691554467564259, + "acc_stderr,none": 0.01733527247533237, + "alias": "truthfulqa_mc1" + }, + "harness|arc:easy|0": { + "acc,none": 0.8257575757575758, + "acc_stderr,none": 0.007783437255488237, + "acc_norm,none": 0.8186026936026936, + "acc_norm_stderr,none": 0.007907153952801702, + "alias": "arc_easy" + }, + "harness|piqa|0": { + "acc,none": 0.7959738846572362, + 
"acc_stderr,none": 0.009402378102942617, + "acc_norm,none": 0.7986942328618063, + "acc_norm_stderr,none": 0.009355431098990435, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.7146477892593979, + "acc_stderr,none": 0.01488794988014781, + "alias": "truthfulqa_mc2" + } + }, + "task_info": { + "model": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.2", + "revision": "main", + "private": false, + "params": 10.7, + "architectures": "LlamaForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 10.7, + "model_size": 5.6, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-11T11:55:16Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.2.0.dev", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "enable_quanted_input": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 200, + "lr": 0.005, + "minmax_lr": 0.01, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "float16", + "static_groups": false, + "sym": false, + "true_sequential": false + }, + "versions": { + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + 
"harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1716009494.4991646, + "config": { + "model": "hf", + "model_args": "pretrained=/dataset/SOLAR-10.7B-Instruct-v1.0-samples1024-1w-2-tbs-16,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/Nan-Do/results_2024-05-21-12-09-53.json b/Nan-Do/results_2024-05-21-12-09-53.json new file mode 100644 index 0000000000000000000000000000000000000000..4c7bdb18bf88b34cf271115779a55b59d8727901 --- /dev/null +++ b/Nan-Do/results_2024-05-21-12-09-53.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + 
"num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-21-12-09-53", + "total_evaluation_time_secondes": "", + "model_name": "Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 13.621387264, + "model_params": 24.153427968, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.6935869348735312, + "acc_stderr,none": 0.004600612000422642, + "acc_norm,none": 0.8740290778729337, + "acc_norm_stderr,none": 0.00331138449815877, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.8760852407261247, + "acc_stderr,none": 0.009260146295063713, + "alias": "winogrande" + }, + "harness|boolq|0": { + "acc,none": 0.8749235474006116, + "acc_stderr,none": 0.005785826015982954, + "alias": "boolq" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5960832313341493, + "acc_stderr,none": 0.01717727682258428, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.8220892274211099, + "acc_stderr,none": 0.008922899948085587, + "acc_norm,none": 0.8378672470076169, + "acc_norm_stderr,none": 0.008599405082519758, + "alias": "piqa" + }, + "harness|mmlu|0": { + "acc,none": 0.6144423871243413, + "acc_stderr,none": 0.0038680796901472215, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5613177470775771, + "acc_stderr,none": 0.0067944330451592365 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.48412698412698413, + "acc_stderr,none": 0.04469881854072606 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7636363636363637, + "acc_stderr,none": 0.033175059300091805 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.803921568627451, + "acc_stderr,none": 0.027865942286639325 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8059071729957806, + "acc_stderr,none": 0.025744902532290927 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.743801652892562, + "acc_stderr,none": 0.03984979653302872 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.75, + "acc_stderr,none": 0.04186091791394607 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7177914110429447, + "acc_stderr,none": 0.03536117886664743 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6763005780346821, + "acc_stderr,none": 0.025190181327608415 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3217877094972067, + "acc_stderr,none": 0.015624236160792577 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6881028938906752, + "acc_stderr,none": 0.02631185807185416 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.024922001168886335 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4602346805736636, + "acc_stderr,none": 0.012729785386598568 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7894736842105263, + "acc_stderr,none": 0.031267817146631786 + }, + 
"harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6887672996459607, + "acc_stderr,none": 0.007961111803791486 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.6, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6867924528301886, + "acc_stderr,none": 0.02854479331905533 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6416184971098265, + "acc_stderr,none": 0.03656343653353159 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6547085201793722, + "acc_stderr,none": 0.03191100192835794 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8058252427184466, + "acc_stderr,none": 0.03916667762822586 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8632478632478633, + "acc_stderr,none": 0.02250903393707781 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.72, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8148148148148148, + "acc_stderr,none": 0.013890862162876168 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7091503267973857, + "acc_stderr,none": 0.02600480036395213 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.46808510638297873, + "acc_stderr,none": 0.02976667507587387 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6691176470588235, + "acc_stderr,none": 0.02858270975389843 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5120481927710844, + "acc_stderr,none": 0.03891364495835817 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7240818979525512, + "acc_stderr,none": 0.00786854316152042 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4824561403508772, + "acc_stderr,none": 0.04700708033551038 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7626262626262627, + "acc_stderr,none": 0.030313710538198892 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8756476683937824, + "acc_stderr,none": 0.023814477086593552 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6615384615384615, + "acc_stderr,none": 0.023991500500313043 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6596638655462185, + "acc_stderr,none": 0.030778057422931673 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8348623853211009, + "acc_stderr,none": 0.015919557829976068 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7557251908396947, + "acc_stderr,none": 0.03768335959728745 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6323529411764706, + "acc_stderr,none": 0.019506291693954854 + }, + "harness|mmlu_public_relations|0": { + 
"alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425464 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7183673469387755, + "acc_stderr,none": 0.02879518557429129 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8258706467661692, + "acc_stderr,none": 0.026814951200421603 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.86, + "acc_stderr,none": 0.034873508801977676 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5134792261338408, + "acc_stderr,none": 0.008570381942057599 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542126 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.562962962962963, + "acc_stderr,none": 0.042849586397534 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6710526315789473, + "acc_stderr,none": 0.03823428969926603 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7152777777777778, + "acc_stderr,none": 0.03773809990686934 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.55, + "acc_stderr,none": 0.04999999999999999 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466997 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252607 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5404255319148936, + "acc_stderr,none": 0.032579014820998356 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5448275862068965, + "acc_stderr,none": 0.04149886942192117 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.42592592592592593, + "acc_stderr,none": 0.02546714904546955 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7580645161290323, + "acc_stderr,none": 0.024362599693031096 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4729064039408867, + "acc_stderr,none": 0.03512819077876106 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621505 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3592592592592593, + "acc_stderr,none": 0.02925290592725198 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.32450331125827814, + "acc_stderr,none": 0.038227469376587525 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.49537037037037035, + "acc_stderr,none": 0.03409825519163572 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5178571428571429, + 
"acc_stderr,none": 0.04742762361243011 + }, + "harness|arc:challenge|0": { + "acc,none": 0.659556313993174, + "acc_stderr,none": 0.013847460518892973, + "acc_norm,none": 0.6655290102389079, + "acc_norm_stderr,none": 0.013787460322441372, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 5.498205577502831, + "perplexity_stderr,none": 0.1438667511133427, + "acc,none": 0.48166116825150396, + "acc_stderr,none": 0.006961290586136397, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.4, + "acc_stderr,none": 0.021930844120728505, + "acc_norm,none": 0.488, + "acc_norm_stderr,none": 0.02237662679792717, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.7543078040959663, + "acc_stderr,none": 0.014116813297013678, + "alias": "truthfulqa_mc2" + }, + "harness|arc:easy|0": { + "acc,none": 0.8434343434343434, + "acc_stderr,none": 0.0074566212681594565, + "acc_norm,none": 0.7845117845117845, + "acc_norm_stderr,none": 0.008436837633389658, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF", + "revision": "main", + "private": false, + "params": null, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": null, + "model_size": null, + "weight_dtype": "int4", + "compute_dtype": "bfloat16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-18T02:32:17Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + 
"harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1716158716.7148077, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF,ftype=*Q4_0.gguf,dtype=bfloat16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + 
"gen_kwargs": null + } +} \ No newline at end of file diff --git a/PrunaAI/results_2024-05-13-21-54-31.json b/PrunaAI/results_2024-05-13-21-54-31.json new file mode 100644 index 0000000000000000000000000000000000000000..d81a34d5f38294d449c185d2bff89656fa5d720d --- /dev/null +++ b/PrunaAI/results_2024-05-13-21-54-31.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-13-21-54-31", + "total_evaluation_time_secondes": "", + "model_name": "PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 2.181729792, + "model_params": 3.821079552, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.5686118303126867, + "acc_stderr,none": 0.004942578520987334, + "acc_norm,none": 0.752141007767377, + "acc_norm_stderr,none": 0.004308870978210358, + "alias": "hellaswag" + }, + "harness|mmlu|0": { + "acc,none": 0.5713573565019228, + "acc_stderr,none": 0.003994070978563716, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5270988310308182, + "acc_stderr,none": 0.007098258374624685 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.47619047619047616, + "acc_stderr,none": 0.04467062628403273 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.46060606060606063, + "acc_stderr,none": 0.03892207016552013 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.6029411764705882, + "acc_stderr,none": 0.03434131164719128 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.5274261603375527, + "acc_stderr,none": 0.03249822718301303 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7851239669421488, + "acc_stderr,none": 0.03749492448709699 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6018518518518519, + "acc_stderr,none": 0.04732332615978814 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7177914110429447, + "acc_stderr,none": 0.03536117886664742 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5982658959537572, + "acc_stderr,none": 0.026394104177643627 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.41899441340782123, + "acc_stderr,none": 0.016501579306861677 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6302250803858521, + "acc_stderr,none": 0.027417996705630998 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6944444444444444, + "acc_stderr,none": 0.025630824975621344 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4471968709256845, + "acc_stderr,none": 0.012698825252435111 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7602339181286549, + "acc_stderr,none": 0.03274485211946956 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.638236240746701, + "acc_stderr,none": 0.008388402876897084 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - 
business_ethics", + "acc,none": 0.64, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.630188679245283, + "acc_stderr,none": 0.029711421880107933 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5491329479768786, + "acc_stderr,none": 0.03794012674697028 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.39, + "acc_stderr,none": 0.04902071300001975 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6098654708520179, + "acc_stderr,none": 0.03273766725459156 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7766990291262136, + "acc_stderr,none": 0.04123553189891431 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8418803418803419, + "acc_stderr,none": 0.023902325549560417 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695238 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7458492975734355, + "acc_stderr,none": 0.01556925469204577 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5980392156862745, + "acc_stderr,none": 0.02807415894760066 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5035460992907801, + "acc_stderr,none": 0.02982674915328092 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.5661764705882353, + "acc_stderr,none": 0.030105636570016636 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4578313253012048, + "acc_stderr,none": 0.0387862677100236 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.68020799480013, + "acc_stderr,none": 0.008161750542677104 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.34210526315789475, + "acc_stderr,none": 0.04462917535336937 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7121212121212122, + "acc_stderr,none": 0.03225883512300992 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8134715025906736, + "acc_stderr,none": 0.028112091210117474 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6358974358974359, + "acc_stderr,none": 0.02439667298509477 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6596638655462185, + "acc_stderr,none": 0.030778057422931673 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8256880733944955, + "acc_stderr,none": 0.016265675632010354 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.5725190839694656, + "acc_stderr,none": 0.04338920305792401 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6405228758169934, + "acc_stderr,none": 0.01941253924203216 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6090909090909091, + "acc_stderr,none": 0.04673752333670238 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + 
"acc,none": 0.5306122448979592, + "acc_stderr,none": 0.031949171367580624 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7960199004975125, + "acc_stderr,none": 0.02849317624532608 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.77, + "acc_stderr,none": 0.042295258468165044 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4652711703139867, + "acc_stderr,none": 0.008511108943898386 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5777777777777777, + "acc_stderr,none": 0.04266763404099582 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6644736842105263, + "acc_stderr,none": 0.03842498559395267 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.039420826399272135 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.35294117647058826, + "acc_stderr,none": 0.04755129616062946 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.65, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5319148936170213, + "acc_stderr,none": 0.03261936918467382 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.4206896551724138, + "acc_stderr,none": 0.0411391498118926 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.35978835978835977, + "acc_stderr,none": 0.02471807594412928 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6935483870967742, + "acc_stderr,none": 0.02622648565255388 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4827586206896552, + "acc_stderr,none": 0.035158955511657 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.22962962962962963, + "acc_stderr,none": 0.025644108639267624 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3443708609271523, + "acc_stderr,none": 0.03879687024073327 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.46296296296296297, + "acc_stderr,none": 0.03400603625538272 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.41964285714285715, + "acc_stderr,none": 0.04684099321077106 + }, + "harness|boolq|0": { + "acc,none": 0.8510703363914373, + "acc_stderr,none": 0.006226813679382005, + "alias": "boolq" + }, + 
"harness|truthfulqa:mc2|0": { + "acc,none": 0.5298984108646814, + "acc_stderr,none": 0.015543848691376448, + "alias": "truthfulqa_mc2" + }, + "harness|arc:easy|0": { + "acc,none": 0.7916666666666666, + "acc_stderr,none": 0.008333333333333191, + "acc_norm,none": 0.7714646464646465, + "acc_norm_stderr,none": 0.008615944722488503, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.37576499388004897, + "acc_stderr,none": 0.016954584060214297, + "alias": "truthfulqa_mc1" + }, + "harness|openbookqa|0": { + "acc,none": 0.342, + "acc_stderr,none": 0.021236147199899254, + "acc_norm,none": 0.422, + "acc_norm_stderr,none": 0.022109039310618552, + "alias": "openbookqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 6.567239054538494, + "perplexity_stderr,none": 0.1852860668326811, + "acc,none": 0.3089462449058801, + "acc_stderr,none": 0.006437384484045087, + "alias": "lambada_openai" + }, + "harness|piqa|0": { + "acc,none": 0.7905331882480957, + "acc_stderr,none": 0.009494302979819803, + "acc_norm,none": 0.7916213275299239, + "acc_norm_stderr,none": 0.009476125383049464, + "alias": "piqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5315699658703071, + "acc_stderr,none": 0.014582236460866978, + "acc_norm,none": 0.5443686006825939, + "acc_norm_stderr,none": 0.014553749939306868, + "alias": "arc_challenge" + }, + "harness|winogrande|0": { + "acc,none": 0.7316495659037096, + "acc_stderr,none": 0.012453340359561195, + "alias": "winogrande" + } + }, + "task_info": { + "model": "PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed", + "revision": "main", + "private": false, + "params": null, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": null, + "model_size": null, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-10T07:32:46Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + 
"harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|piqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|winogrande|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + 
"openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715589898.9681585, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/Qwen/results_2024-04-26-20-25-12.json b/Qwen/results_2024-04-26-20-25-12.json new file mode 100644 index 0000000000000000000000000000000000000000..406a24d172ba232a406e4d942015c222ff847d1f --- /dev/null +++ b/Qwen/results_2024-04-26-20-25-12.json @@ -0,0 +1,599 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-26-20-25-12", + "total_evaluation_time_secondes": "", + "model_name": "Qwen/Qwen1.5-0.5B-Chat-GPTQ-Int4", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 0.47, + "model_params": 0.31, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.3571997610037841, + "acc_stderr,none": 0.004781950883460504, + "acc_norm,none": 0.4403505277833101, + "acc_norm_stderr,none": 0.004954146286513353, + "alias": "hellaswag" + }, + "harness|mmlu|0": { + "acc,none": 0.3174049280729241, + "acc_stderr,none": 0.0038919020479297904, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.32709883103081827, + "acc_stderr,none": 0.0067828905413636915 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.29365079365079366, + "acc_stderr,none": 0.040735243221471276 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.4666666666666667, + "acc_stderr,none": 0.03895658065271847 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.3627450980392157, + "acc_stderr,none": 0.03374499356319354 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.45569620253164556, + "acc_stderr,none": 0.032419206846933335 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.45454545454545453, + "acc_stderr,none": 0.045454545454545456 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.4166666666666667, + "acc_stderr,none": 0.04766075165356461 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.39263803680981596, + "acc_stderr,none": 0.03836740907831029 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.37572254335260113, + "acc_stderr,none": 0.026074314851657083 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23910614525139665, + "acc_stderr,none": 0.014265554192331154 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.3440514469453376, + "acc_stderr,none": 0.026981478043648047 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.3395061728395062, + "acc_stderr,none": 0.026348564412011624 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 
0.30182529335071706, + "acc_stderr,none": 0.01172435051810589 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.3216374269005848, + "acc_stderr,none": 0.03582529442573122 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.35017701963308656, + "acc_stderr,none": 0.008463796213785452 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.30943396226415093, + "acc_stderr,none": 0.028450154794118627 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.28901734104046245, + "acc_stderr,none": 0.034564257450869995 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.3632286995515695, + "acc_stderr,none": 0.03227790442850499 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.42718446601941745, + "acc_stderr,none": 0.04897957737781168 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.47435897435897434, + "acc_stderr,none": 0.03271298896811159 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.44, + "acc_stderr,none": 0.049888765156985884 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.39080459770114945, + "acc_stderr,none": 0.01744836606706253 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.3790849673202614, + "acc_stderr,none": 0.027780141207023355 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.2907801418439716, + "acc_stderr,none": 0.027090664368353178 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.17279411764705882, + "acc_stderr,none": 0.02296606758558178 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.3132530120481928, + "acc_stderr,none": 0.03610805018031024 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.303542411439714, + "acc_stderr,none": 0.008223945690983177 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.19298245614035087, + "acc_stderr,none": 0.03712454853721368 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.29292929292929293, + "acc_stderr,none": 0.032424979581788145 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.2538860103626943, + "acc_stderr,none": 0.03141024780565317 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.28205128205128205, + "acc_stderr,none": 0.022815813098896597 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.24789915966386555, + "acc_stderr,none": 0.028047967224176892 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.3302752293577982, + "acc_stderr,none": 0.02016446633634298 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.44274809160305345, + "acc_stderr,none": 
0.043564472026650695 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.2957516339869281, + "acc_stderr,none": 0.018463154132632817 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.36363636363636365, + "acc_stderr,none": 0.04607582090719976 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.20816326530612245, + "acc_stderr,none": 0.02599111767281329 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.39800995024875624, + "acc_stderr,none": 0.03461199429040013 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.46, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.28417380272756104, + "acc_stderr,none": 0.008000179731584128 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.34074074074074073, + "acc_stderr,none": 0.04094376269996793 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.29605263157894735, + "acc_stderr,none": 0.037150621549989056 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2986111111111111, + "acc_stderr,none": 0.03827052357950756 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.2, + "acc_stderr,none": 0.04020151261036845 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.37, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.24509803921568626, + "acc_stderr,none": 0.042801058373643966 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.25957446808510637, + "acc_stderr,none": 0.02865917937429232 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.36551724137931035, + "acc_stderr,none": 0.04013124195424386 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.3253968253968254, + "acc_stderr,none": 0.024130158299762613 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.31290322580645163, + "acc_stderr,none": 0.02637756702864586 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.24630541871921183, + "acc_stderr,none": 0.030315099285617736 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.22592592592592592, + "acc_stderr,none": 0.02549753263960954 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2251655629139073, + "acc_stderr,none": 0.03410435282008937 + }, + 
"harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.19444444444444445, + "acc_stderr,none": 0.02699145450203673 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.2767857142857143, + "acc_stderr,none": 0.042466243366976256 + }, + "harness|openbookqa|0": { + "acc,none": 0.194, + "acc_stderr,none": 0.017701827855304608, + "acc_norm,none": 0.304, + "acc_norm_stderr,none": 0.020591649571224932, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.42460255683280484, + "acc_stderr,none": 0.015083696980807306, + "alias": "truthfulqa_mc2" + }, + "harness|boolq|0": { + "acc,none": 0.41039755351681956, + "acc_stderr,none": 0.008603488048617516, + "alias": "boolq" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.26193390452876375, + "acc_stderr,none": 0.015392118805015023, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.6746463547334058, + "acc_stderr,none": 0.010931036623525191, + "acc_norm,none": 0.6632208922742111, + "acc_norm_stderr,none": 0.01102673892525118, + "alias": "piqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.5214646464646465, + "acc_stderr,none": 0.010250325159456659, + "acc_norm,none": 0.4772727272727273, + "acc_norm_stderr,none": 0.01024917909060598, + "alias": "arc_easy" + }, + "harness|lambada:openai|0": { + "perplexity,none": 32.89382650895189, + "perplexity_stderr,none": 1.6153213247657885, + "acc,none": 0.39782650882980786, + "acc_stderr,none": 0.0068189852131081774, + "alias": "lambada_openai" + }, + "harness|winogrande|0": { + "acc,none": 0.5509076558800315, + "acc_stderr,none": 0.01397945938914085, + "alias": "winogrande" + }, + "harness|arc:challenge|0": { + "acc,none": 0.25426621160409557, + "acc_stderr,none": 0.012724999945157744, + "acc_norm,none": 0.2738907849829352, + "acc_norm_stderr,none": 0.013032004972989505, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "Qwen/Qwen1.5-0.5B-Chat-GPTQ-Int4", + "revision": "main", + "private": false, + "params": 0.5, + "architectures": "Qwen2ForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 0.5, + "model_size": 0.25, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-26T09:50:00Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "batch_size": 1, + "bits": 4, + "block_name_to_quantize": null, + "cache_block_outputs": true, + "damp_percent": 0.01, + "dataset": null, + "desc_act": false, + "exllama_config": { + "version": 1 + }, + "group_size": 128, + "max_input_length": null, + "model_seqlen": null, + "module_name_preceding_first_block": null, + "modules_in_block_to_quantize": null, + "pad_token_id": null, + "quant_method": "gptq", + "sym": true, + "tokenizer": null, + "true_sequential": true, + "use_cuda_fp16": false, + "use_exllama": true + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + 
"harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + 
"mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714132429.364645, + "config": { + "model": "hf", + "model_args": "pretrained=Qwen/Qwen1.5-0.5B-Chat-GPTQ-Int4,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/Qwen/results_2024-04-26-21-31-31.json b/Qwen/results_2024-04-26-21-31-31.json new file mode 100644 index 0000000000000000000000000000000000000000..2efdc7f97b2a4d5e6ca244f019f3853acc8d850f --- /dev/null +++ b/Qwen/results_2024-04-26-21-31-31.json @@ -0,0 +1,583 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-26-21-31-31", + "total_evaluation_time_secondes": "", + "model_name": "Qwen/Qwen1.5-0.5B-Chat-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 0.78, + "model_params": 0.31, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.6681175190424374, + "acc_stderr,none": 0.010986617776361585, + "acc_norm,none": 0.6653971708378672, + "acc_norm_stderr,none": 0.011009071725162507, + "alias": "piqa" + }, + "harness|openbookqa|0": { + "acc,none": 0.192, + "acc_stderr,none": 0.017632180454361004, + "acc_norm,none": 0.31, + "acc_norm_stderr,none": 0.020704041021724805, + "alias": "openbookqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.468013468013468, + "acc_stderr,none": 0.010238767643185712, + "acc_norm,none": 0.42297979797979796, + "acc_norm_stderr,none": 0.0101373283822091, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.24146757679180889, + "acc_stderr,none": 0.012506564839739432, + "acc_norm,none": 0.26791808873720135, + "acc_norm_stderr,none": 0.01294203019513643, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 37.145150113999286, + "perplexity_stderr,none": 1.8837992772497014, + "acc,none": 0.390064040364836, + "acc_stderr,none": 0.006795511465879196, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2423500611995104, + "acc_stderr,none": 0.015000674373570345, + "alias": "truthfulqa_mc1" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4158103928931979, + "acc_stderr,none": 0.015056624356974616, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.3599880501892053, + "acc_stderr,none": 
0.004790155370993443, + "acc_norm,none": 0.4409480183230432, + "acc_norm_stderr,none": 0.0049548591067816485, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.5343330702446725, + "acc_stderr,none": 0.014019317531542563, + "alias": "winogrande" + }, + "harness|mmlu|0": { + "acc,none": 0.306010539809144, + "acc_stderr,none": 0.0038620141807728598, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.29798087141339, + "acc_stderr,none": 0.006634991308715303 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.30158730158730157, + "acc_stderr,none": 0.04104947269903394 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.36363636363636365, + "acc_stderr,none": 0.03756335775187897 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.3088235294117647, + "acc_stderr,none": 0.03242661719827218 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.379746835443038, + "acc_stderr,none": 0.031591887529658504 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.4628099173553719, + "acc_stderr,none": 0.04551711196104218 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.3888888888888889, + "acc_stderr,none": 0.047128212574267705 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.3067484662576687, + "acc_stderr,none": 0.03623089915724146 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.3468208092485549, + "acc_stderr,none": 0.025624723994030454 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574885 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.3247588424437299, + "acc_stderr,none": 0.026596782287697046 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.33024691358024694, + "acc_stderr,none": 0.026168298456732846 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.26988265971316816, + "acc_stderr,none": 0.011337381084250397 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.2807017543859649, + "acc_stderr,none": 0.034462962170884265 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.3447055037013196, + "acc_stderr,none": 0.008436499399115982 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.39, + "acc_stderr,none": 0.04902071300001975 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.30566037735849055, + "acc_stderr,none": 0.028353298073322666 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.2947976878612717, + "acc_stderr,none": 0.03476599607516479 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.3901345291479821, + "acc_stderr,none": 0.03273766725459156 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.3883495145631068, + "acc_stderr,none": 0.0482572933735639 + }, + 
"harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.4658119658119658, + "acc_stderr,none": 0.03267942734081228 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.388250319284802, + "acc_stderr,none": 0.017427673295544337 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.38235294117647056, + "acc_stderr,none": 0.027826109307283704 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.24113475177304963, + "acc_stderr,none": 0.025518731049537773 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.19852941176470587, + "acc_stderr,none": 0.024231013370541114 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.3192771084337349, + "acc_stderr,none": 0.0362933532994786 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.30289242768930774, + "acc_stderr,none": 0.008243456801615645 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.23684210526315788, + "acc_stderr,none": 0.03999423879281335 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.30303030303030304, + "acc_stderr,none": 0.03274287914026867 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.26424870466321243, + "acc_stderr,none": 0.031821550509166484 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.28205128205128205, + "acc_stderr,none": 0.022815813098896603 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.2647058823529412, + "acc_stderr,none": 0.028657491285071973 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.326605504587156, + "acc_stderr,none": 0.020106990889937303 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.45038167938931295, + "acc_stderr,none": 0.04363643698524779 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.29248366013071897, + "acc_stderr,none": 0.01840341571010979 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.36363636363636365, + "acc_stderr,none": 0.04607582090719976 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.21224489795918366, + "acc_stderr,none": 0.026176967197866767 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.35323383084577115, + "acc_stderr,none": 0.03379790611796777 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.42, + "acc_stderr,none": 0.04960449637488583 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.28290516967967017, + "acc_stderr,none": 0.00799797732902615 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.04072314811876837 + }, + "harness|mmlu_astronomy|0": { + 
"alias": " - astronomy", + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.03583496176361063 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2847222222222222, + "acc_stderr,none": 0.03773809990686934 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.30392156862745096, + "acc_stderr,none": 0.045766654032077636 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.2680851063829787, + "acc_stderr,none": 0.028957342788342347 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.33793103448275863, + "acc_stderr,none": 0.039417076320648906 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2671957671957672, + "acc_stderr,none": 0.022789673145776575 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.3193548387096774, + "acc_stderr,none": 0.026522709674667765 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.22660098522167488, + "acc_stderr,none": 0.029454863835292975 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.35, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.23333333333333334, + "acc_stderr,none": 0.025787874220959316 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2847682119205298, + "acc_stderr,none": 0.03684881521389023 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.2175925925925926, + "acc_stderr,none": 0.028139689444859683 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.23214285714285715, + "acc_stderr,none": 0.04007341809755805 + }, + "harness|boolq|0": { + "acc,none": 0.40397553516819573, + "acc_stderr,none": 0.008582268854021396, + "alias": "boolq" + } + }, + "task_info": { + "model": "Qwen/Qwen1.5-0.5B-Chat-AWQ", + "revision": "main", + "private": false, + "params": 1.412, + "architectures": "Qwen2ForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 2.824, + "model_size": 1.412, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-26T12:26:52Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "modules_to_not_convert": null, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|arc:easy|0": 1.0, + 
"harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|boolq|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + 
"mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714134556.4389935, + "config": { + "model": "hf", + "model_args": "pretrained=Qwen/Qwen1.5-0.5B-Chat-AWQ,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/Qwen/results_2024-04-28-05-05-52.json b/Qwen/results_2024-04-28-05-05-52.json new file mode 100644 index 0000000000000000000000000000000000000000..13218e016d05211aa8ae0a7361c8526d94026523 --- /dev/null +++ b/Qwen/results_2024-04-28-05-05-52.json @@ -0,0 +1,583 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-28-05-05-52", + "total_evaluation_time_secondes": "", + "model_name": "Qwen/Qwen1.5-7B-Chat-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.86, + "model_params": 6.53, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.6527229676400947, + "acc_stderr,none": 0.013380909249751237, + "alias": "winogrande" + }, + "harness|boolq|0": { + "acc,none": 0.8366972477064221, + "acc_stderr,none": 0.006465073432190007, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.7388465723612623, + "acc_stderr,none": 0.01024873864993557, + "acc_norm,none": 0.7426550598476604, + "acc_norm_stderr,none": 0.010199921064792512, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5316967791518732, + "acc_stderr,none": 0.015886945830206962, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4274744027303754, + "acc_stderr,none": 0.014456862944650649, + "acc_norm,none": 0.45307167235494883, + "acc_norm_stderr,none": 0.014546892052005628, + "alias": "arc_challenge" + }, + "harness|arc:easy|0": { + "acc,none": 0.6839225589225589, + "acc_stderr,none": 0.009540440071928294, + "acc_norm,none": 0.6241582491582491, + "acc_norm_stderr,none": 0.009938436373170633, + "alias": "arc_easy" + }, + 
"harness|hellaswag|0": { + "acc,none": 0.5802628958374826, + "acc_stderr,none": 0.004925072159723824, + "acc_norm,none": 0.7638916550487951, + "acc_norm_stderr,none": 0.004238215815533083, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3659730722154223, + "acc_stderr,none": 0.01686294168408839, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 0.5952143569292123, + "acc_stderr,none": 0.00396669659542448, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5460148777895856, + "acc_stderr,none": 0.006937437849855694 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.5, + "acc_stderr,none": 0.04472135954999579 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7575757575757576, + "acc_stderr,none": 0.03346409881055953 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7549019607843137, + "acc_stderr,none": 0.030190282453501954 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7721518987341772, + "acc_stderr,none": 0.02730348459906942 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7024793388429752, + "acc_stderr,none": 0.04173349148083499 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7592592592592593, + "acc_stderr,none": 0.04133119440243838 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6748466257668712, + "acc_stderr,none": 0.03680350371286464 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6647398843930635, + "acc_stderr,none": 0.025416003773165555 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.36983240223463687, + "acc_stderr,none": 0.016145881256056212 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6752411575562701, + "acc_stderr,none": 0.02659678228769705 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6358024691358025, + "acc_stderr,none": 0.026774929899722313 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4348109517601043, + "acc_stderr,none": 0.012661233805616307 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7192982456140351, + "acc_stderr,none": 0.034462962170884265 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6623752816221435, + "acc_stderr,none": 0.008215126567884317 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.62, + "acc_stderr,none": 0.04878317312145632 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6792452830188679, + "acc_stderr,none": 0.028727502957880263 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5664739884393064, + "acc_stderr,none": 0.03778621079092056 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6547085201793722, + "acc_stderr,none": 0.03191100192835794 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 
0.7572815533980582, + "acc_stderr,none": 0.04245022486384495 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8675213675213675, + "acc_stderr,none": 0.02220930907316562 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7598978288633461, + "acc_stderr,none": 0.015274685213734191 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6830065359477124, + "acc_stderr,none": 0.026643278474508755 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4397163120567376, + "acc_stderr,none": 0.029609912075594113 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6286764705882353, + "acc_stderr,none": 0.02934980313976587 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4879518072289157, + "acc_stderr,none": 0.038913644958358196 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6789080272993175, + "acc_stderr,none": 0.008213450995422493 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.43859649122807015, + "acc_stderr,none": 0.04668000738510455 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7727272727272727, + "acc_stderr,none": 0.02985751567338641 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7927461139896373, + "acc_stderr,none": 0.02925282329180363 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5923076923076923, + "acc_stderr,none": 0.024915243985987847 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6470588235294118, + "acc_stderr,none": 0.031041941304059285 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8018348623853211, + "acc_stderr,none": 0.01709057380421789 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7022900763358778, + "acc_stderr,none": 0.04010358942462203 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5637254901960784, + "acc_stderr,none": 0.02006287424353913 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425465 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6857142857142857, + "acc_stderr,none": 0.02971932942241747 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7661691542288557, + "acc_stderr,none": 0.029929415408348398 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.03775251680686371 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5207738661592134, + "acc_stderr,none": 0.008667800862507506 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.45, + "acc_stderr,none": 0.049999999999999996 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 
0.043163785995113245 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.631578947368421, + "acc_stderr,none": 0.03925523381052932 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6180555555555556, + "acc_stderr,none": 0.040629907841466674 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.04960449637488583 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.38235294117647056, + "acc_stderr,none": 0.04835503696107224 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.76, + "acc_stderr,none": 0.042923469599092816 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5106382978723404, + "acc_stderr,none": 0.03267862331014063 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5379310344827586, + "acc_stderr,none": 0.04154659671707548 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.47354497354497355, + "acc_stderr,none": 0.02571523981134675 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7161290322580646, + "acc_stderr,none": 0.02564938106302926 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5320197044334976, + "acc_stderr,none": 0.035107665979592154 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.69, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.028742040903948496 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.39072847682119205, + "acc_stderr,none": 0.03983798306659807 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5555555555555556, + "acc_stderr,none": 0.03388857118502325 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.4017857142857143, + "acc_stderr,none": 0.04653333146973646 + }, + "harness|openbookqa|0": { + "acc,none": 0.318, + "acc_stderr,none": 0.020847571620814014, + "acc_norm,none": 0.432, + "acc_norm_stderr,none": 0.02217510926561316, + "alias": "openbookqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 5.595127359503151, + "perplexity_stderr,none": 0.19155346558182795, + "acc,none": 0.6083834659421696, + "acc_stderr,none": 0.006800350287698186, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "Qwen/Qwen1.5-7B-Chat-AWQ", + "revision": "main", + "private": false, + "params": 8.448, + "architectures": "Qwen2ForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 16.896, + "model_size": 8.448, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-27T08:04:58Z", + "model_type": 
"quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "modules_to_not_convert": null, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|openbookqa|0": 1.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + 
"mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714230477.5001445, + "config": { + "model": "hf", + "model_args": "pretrained=Qwen/Qwen1.5-7B-Chat-AWQ,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/Qwen/results_2024-04-28-13-06-41.json b/Qwen/results_2024-04-28-13-06-41.json new file mode 100644 index 0000000000000000000000000000000000000000..3465ec83b7dc56f3a6e1d6ee3661aa616c75946c --- /dev/null +++ b/Qwen/results_2024-04-28-13-06-41.json @@ -0,0 +1,599 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-28-13-06-41", + "total_evaluation_time_secondes": "", + "model_name": "Qwen/Qwen1.5-7B-Chat-GPTQ-Int4", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.86, + "model_params": 6.54, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.735038084874864, + "acc_stderr,none": 0.01029655799331606, + "acc_norm,none": 0.7453754080522307, + "acc_norm_stderr,none": 0.010164432237060476, + "alias": "piqa" + }, + "harness|openbookqa|0": { + "acc,none": 0.332, + "acc_stderr,none": 0.021081766571222852, + "acc_norm,none": 0.416, + "acc_norm_stderr,none": 0.022064943313928876, + "alias": "openbookqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.6687710437710438, + "acc_stderr,none": 0.009657641311350919, + "acc_norm,none": 0.6056397306397306, + "acc_norm_stderr,none": 0.010028176038393004, + "alias": "arc_easy" + }, + "harness|mmlu|0": { + "acc,none": 0.5904429568437545, + 
"acc_stderr,none": 0.003957317221865859, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5377258235919234, + "acc_stderr,none": 0.0068626131961731455 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4523809523809524, + "acc_stderr,none": 0.044518079590553275 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7696969696969697, + "acc_stderr,none": 0.0328766675860349 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7549019607843137, + "acc_stderr,none": 0.03019028245350194 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7510548523206751, + "acc_stderr,none": 0.028146970599422644 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.768595041322314, + "acc_stderr,none": 0.038498560987940904 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.04077494709252627 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.656441717791411, + "acc_stderr,none": 0.03731133519673893 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.653179190751445, + "acc_stderr,none": 0.02562472399403046 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.29720670391061454, + "acc_stderr,none": 0.015285313353641592 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6591639871382636, + "acc_stderr,none": 0.026920841260776155 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6481481481481481, + "acc_stderr,none": 0.026571483480719967 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4517601043024772, + "acc_stderr,none": 0.012710662233660247 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7660818713450293, + "acc_stderr,none": 0.03246721765117826 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6501448342452526, + "acc_stderr,none": 0.008309193510443607 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.65, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6830188679245283, + "acc_stderr,none": 0.028637235639800904 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5606936416184971, + "acc_stderr,none": 0.037842719328874674 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6098654708520179, + "acc_stderr,none": 0.03273766725459157 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7281553398058253, + "acc_stderr,none": 0.044052680241409216 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8333333333333334, + "acc_stderr,none": 0.024414947304543674 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.69, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", 
+ "acc,none": 0.7509578544061303, + "acc_stderr,none": 0.015464676163395972 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6633986928104575, + "acc_stderr,none": 0.02705797462449438 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4432624113475177, + "acc_stderr,none": 0.029634838473766006 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6213235294117647, + "acc_stderr,none": 0.02946513363977613 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.463855421686747, + "acc_stderr,none": 0.03882310850890594 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6821579460513487, + "acc_stderr,none": 0.008189081115990158 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.49122807017543857, + "acc_stderr,none": 0.04702880432049615 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.02962022787479048 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7772020725388601, + "acc_stderr,none": 0.030031147977641538 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5974358974358974, + "acc_stderr,none": 0.024864995159767752 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6428571428571429, + "acc_stderr,none": 0.031124619309328177 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8128440366972477, + "acc_stderr,none": 0.016722684526200148 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6870229007633588, + "acc_stderr,none": 0.04066962905677697 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.553921568627451, + "acc_stderr,none": 0.020109864547181357 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425464 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7020408163265306, + "acc_stderr,none": 0.02927956741106567 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7910447761194029, + "acc_stderr,none": 0.028748298931728655 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.81, + "acc_stderr,none": 0.039427724440366234 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5207738661592134, + "acc_stderr,none": 0.008650288386489482 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237101 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5777777777777777, + "acc_stderr,none": 0.04266763404099582 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.631578947368421, + "acc_stderr,none": 0.039255233810529325 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6527777777777778, + "acc_stderr,none": 0.039812405437178615 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.42, + 
"acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.33, + "acc_stderr,none": 0.04725815626252603 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466997 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.74, + "acc_stderr,none": 0.0440844002276808 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5361702127659574, + "acc_stderr,none": 0.03260038511835771 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5310344827586206, + "acc_stderr,none": 0.04158632762097828 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4603174603174603, + "acc_stderr,none": 0.025670080636909193 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7193548387096774, + "acc_stderr,none": 0.0255606047210229 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5320197044334976, + "acc_stderr,none": 0.03510766597959217 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.69, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34444444444444444, + "acc_stderr,none": 0.02897264888484427 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.37748344370860926, + "acc_stderr,none": 0.03958027231121569 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5324074074074074, + "acc_stderr,none": 0.03402801581358966 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.38392857142857145, + "acc_stderr,none": 0.04616143075028546 + }, + "harness|winogrande|0": { + "acc,none": 0.6432517758484609, + "acc_stderr,none": 0.01346339395802872, + "alias": "winogrande" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.541346036842384, + "acc_stderr,none": 0.01584861213215754, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.41638225255972694, + "acc_stderr,none": 0.01440561827943617, + "acc_norm,none": 0.439419795221843, + "acc_norm_stderr,none": 0.014503747823580123, + "alias": "arc_challenge" + }, + "harness|boolq|0": { + "acc,none": 0.8403669724770643, + "acc_stderr,none": 0.006406021659710515, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 6.125395895964018, + "perplexity_stderr,none": 0.21476426826907466, + "acc,none": 0.5940228992819717, + "acc_stderr,none": 0.006841706431619957, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3806609547123623, + "acc_stderr,none": 0.016997627871907915, + "alias": "truthfulqa_mc1" + }, + "harness|hellaswag|0": { + "acc,none": 0.578868751244772, + "acc_stderr,none": 0.004927314729433566, + "acc_norm,none": 0.7615016928898626, + "acc_norm_stderr,none": 0.0042529434040930484, + "alias": "hellaswag" + } + }, + "task_info": { + "model": "Qwen/Qwen1.5-7B-Chat-GPTQ-Int4", + "revision": 
"main", + "private": false, + "params": 8.456, + "architectures": "Qwen2ForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 16.912, + "model_size": 8.456, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-27T08:09:37Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "batch_size": 1, + "bits": 4, + "block_name_to_quantize": null, + "cache_block_outputs": true, + "damp_percent": 0.01, + "dataset": null, + "desc_act": false, + "exllama_config": { + "version": 1 + }, + "group_size": 128, + "max_input_length": null, + "model_seqlen": null, + "module_name_preceding_first_block": null, + "modules_in_block_to_quantize": null, + "pad_token_id": null, + "quant_method": "gptq", + "sym": true, + "tokenizer": null, + "true_sequential": true, + "use_cuda_fp16": false, + "use_exllama": true + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 
0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|hellaswag|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714275925.6022973, + "config": { + "model": "hf", + "model_args": "pretrained=Qwen/Qwen1.5-7B-Chat-GPTQ-Int4,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/Qwen/results_2024-05-05-22-20-44.json b/Qwen/results_2024-05-05-22-20-44.json new file mode 100644 index 0000000000000000000000000000000000000000..ab179f121c0d6388a4cc07cfbdd2dd8df062097d --- /dev/null +++ b/Qwen/results_2024-05-05-22-20-44.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-05-22-20-44", + "total_evaluation_time_secondes": "", + "model_name": "Qwen/Qwen1.5-0.5B-Chat-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 0.389045248, + "model_params": 
0.619570176, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.3569010157339175, + "acc_stderr,none": 0.00478106139087391, + "acc_norm,none": 0.4396534554869548, + "acc_norm_stderr,none": 0.004953305461311763, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.23011015911872704, + "acc_stderr,none": 0.014734557959807762, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 34.802753466476304, + "perplexity_stderr,none": 1.7695091328108965, + "acc,none": 0.26508829807878903, + "acc_stderr,none": 0.006149289402158153, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.3785932721712538, + "acc_stderr,none": 0.008483341718024479, + "alias": "boolq" + }, + "harness|winogrande|0": { + "acc,none": 0.5351223362273086, + "acc_stderr,none": 0.014017773120881587, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.37415824915824913, + "acc_stderr,none": 0.009929516948977625, + "acc_norm,none": 0.37247474747474746, + "acc_norm_stderr,none": 0.009920469215736012, + "alias": "arc_easy" + }, + "harness|openbookqa|0": { + "acc,none": 0.176, + "acc_stderr,none": 0.01704785202062227, + "acc_norm,none": 0.27, + "acc_norm_stderr,none": 0.019874354831287487, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.40641904896162573, + "acc_stderr,none": 0.014888528969774828, + "alias": "truthfulqa_mc2" + }, + "harness|mmlu|0": { + "acc,none": 0.2296681384418174, + "acc_stderr,none": 0.00354375915129246, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.24250797024442083, + "acc_stderr,none": 0.0062462231153999565 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2857142857142857, + "acc_stderr,none": 0.04040610178208841 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.21818181818181817, + "acc_stderr,none": 0.03225078108306289 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.25, + "acc_stderr,none": 0.03039153369274154 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.270042194092827, + "acc_stderr,none": 0.028900721906293426 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.2396694214876033, + "acc_stderr,none": 0.03896878985070417 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.26851851851851855, + "acc_stderr,none": 0.04284467968052192 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.22085889570552147, + "acc_stderr,none": 0.032591773927421776 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.24855491329479767, + "acc_stderr,none": 0.023267528432100174 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574885 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.1864951768488746, + "acc_stderr,none": 0.02212243977248077 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.21604938271604937, + "acc_stderr,none": 0.022899162918445813 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 
0.24641460234680573, + "acc_stderr,none": 0.011005971399927244 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.3216374269005848, + "acc_stderr,none": 0.03582529442573122 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.23978113936272932, + "acc_stderr,none": 0.00764225029165751 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.21509433962264152, + "acc_stderr,none": 0.025288394502891377 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.20809248554913296, + "acc_stderr,none": 0.030952890217749884 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.18, + "acc_stderr,none": 0.038612291966536955 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.31390134529147984, + "acc_stderr,none": 0.03114679648297246 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.17475728155339806, + "acc_stderr,none": 0.03760178006026621 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.2905982905982906, + "acc_stderr,none": 0.029745048572674057 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.23754789272030652, + "acc_stderr,none": 0.015218733046150195 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.22549019607843138, + "acc_stderr,none": 0.023929155517351284 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.23404255319148937, + "acc_stderr,none": 0.025257861359432407 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.18382352941176472, + "acc_stderr,none": 0.02352924218519311 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.28313253012048195, + "acc_stderr,none": 0.03507295431370518 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.21741956451088723, + "acc_stderr,none": 0.007433074431341764 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.23684210526315788, + "acc_stderr,none": 0.039994238792813386 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.17676767676767677, + "acc_stderr,none": 0.027178752639044915 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.20207253886010362, + "acc_stderr,none": 0.02897908979429673 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.20256410256410257, + "acc_stderr,none": 0.020377660970371397 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.21008403361344538, + "acc_stderr,none": 0.026461398717471874 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.1926605504587156, + "acc_stderr,none": 0.016909276884936073 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2595419847328244, + 
"acc_stderr,none": 0.03844876139785271 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.25, + "acc_stderr,none": 0.01751781884501444 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.21818181818181817, + "acc_stderr,none": 0.03955932861795833 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.18775510204081633, + "acc_stderr,none": 0.02500025603954622 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.24378109452736318, + "acc_stderr,none": 0.030360490154014652 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.21249603552172533, + "acc_stderr,none": 0.007271218700485502 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.22, + "acc_stderr,none": 0.04163331998932269 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.18518518518518517, + "acc_stderr,none": 0.03355677216313142 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.17763157894736842, + "acc_stderr,none": 0.031103182383123398 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2569444444444444, + "acc_stderr,none": 0.03653946969442099 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.2, + "acc_stderr,none": 0.040201512610368445 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.26, + "acc_stderr,none": 0.044084400227680794 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.21568627450980393, + "acc_stderr,none": 0.040925639582376556 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.26382978723404255, + "acc_stderr,none": 0.02880998985410298 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.2413793103448276, + "acc_stderr,none": 0.03565998174135302 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.20899470899470898, + "acc_stderr,none": 0.020940481565334835 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.1774193548387097, + "acc_stderr,none": 0.021732540689329265 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.15270935960591134, + "acc_stderr,none": 0.025308904539380624 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2111111111111111, + "acc_stderr,none": 0.02488211685765508 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.1986754966887417, + "acc_stderr,none": 0.032578473844367746 + }, + 
"harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.1527777777777778, + "acc_stderr,none": 0.02453632602613422 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3125, + "acc_stderr,none": 0.043994650575715215 + }, + "harness|arc:challenge|0": { + "acc,none": 0.23464163822525597, + "acc_stderr,none": 0.012383873560768664, + "acc_norm,none": 0.2738907849829352, + "acc_norm_stderr,none": 0.013032004972989503, + "alias": "arc_challenge" + }, + "harness|piqa|0": { + "acc,none": 0.6675734494015234, + "acc_stderr,none": 0.01099114155744559, + "acc_norm,none": 0.6610446137105549, + "acc_norm_stderr,none": 0.011044144419710638, + "alias": "piqa" + } + }, + "task_info": { + "model": "Qwen/Qwen1.5-0.5B-Chat-GGUF", + "revision": "main", + "private": false, + "params": 2.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 4.0, + "model_size": 2.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-01T16:23:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*q4_0.gguf" + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|boolq|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 
0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:challenge|0": 1.0, + "harness|piqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714746426.7282674, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=Qwen/Qwen1.5-0.5B-Chat-GGUF,ftype=*q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/Qwen/results_2024-05-07-09-41-53.json b/Qwen/results_2024-05-07-09-41-53.json new file mode 100644 index 0000000000000000000000000000000000000000..7ad881383b2298ca17d35d4fd9cd1183cb80b170 --- /dev/null +++ b/Qwen/results_2024-05-07-09-41-53.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + 
"num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-07-09-41-53", + "total_evaluation_time_secondes": "", + "model_name": "Qwen/Qwen1.5-7B-Chat-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.505956352, + "model_params": 7.721324544, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|openbookqa|0": { + "acc,none": 0.3, + "acc_stderr,none": 0.020514426225628043, + "acc_norm,none": 0.396, + "acc_norm_stderr,none": 0.021893529941665817, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.7399347116430903, + "acc_stderr,none": 0.01023489324906131, + "acc_norm,none": 0.7393906420021763, + "acc_norm_stderr,none": 0.010241826155811618, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5445532573203143, + "acc_stderr,none": 0.01620516610689256, + "alias": "truthfulqa_mc2" + }, + "harness|winogrande|0": { + "acc,none": 0.6393054459352802, + "acc_stderr,none": 0.01349606439423403, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3537331701346389, + "acc_stderr,none": 0.016737814358846147, + "alias": "truthfulqa_mc1" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4104095563139932, + "acc_stderr,none": 0.014374922192642662, + "acc_norm,none": 0.4232081911262799, + "acc_norm_stderr,none": 0.014438036220848025, + "alias": "arc_challenge" + }, + "harness|hellaswag|0": { + "acc,none": 0.5438159729137622, + "acc_stderr,none": 0.004970585328297622, + "acc_norm,none": 0.6963752240589524, + "acc_norm_stderr,none": 0.00458882795877513, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.6136363636363636, + "acc_stderr,none": 0.009991296778159622, + "acc_norm,none": 0.6056397306397306, + "acc_norm_stderr,none": 0.010028176038393002, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.7981651376146789, + "acc_stderr,none": 0.00701999832474464, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 8.629176527841318, + "perplexity_stderr,none": 0.42869315340212266, + "acc,none": 0.4244129633223365, + "acc_stderr,none": 0.006885918770006381, + "alias": "lambada_openai" + }, + "harness|mmlu|0": { + "acc,none": 0.5183734510753454, + "acc_stderr,none": 0.00400817537653083, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.4359192348565356, + "acc_stderr,none": 0.006934954327448632 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3968253968253968, + "acc_stderr,none": 0.0437588849272706 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.03681050869161549 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.37254901960784315, + "acc_stderr,none": 0.03393388584958406 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.2742616033755274, + "acc_stderr,none": 0.029041333510598046 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.6528925619834711, + "acc_stderr,none": 0.043457245702925335 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.040191074725573483 + }, + "harness|mmlu_logical_fallacies|0": { + 
"alias": " - logical_fallacies", + "acc,none": 0.6134969325153374, + "acc_stderr,none": 0.03825825548848607 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5086705202312138, + "acc_stderr,none": 0.02691504735536981 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3754189944134078, + "acc_stderr,none": 0.01619510424846353 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.639871382636656, + "acc_stderr,none": 0.027264297599804015 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6234567901234568, + "acc_stderr,none": 0.026959344518747787 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.32790091264667537, + "acc_stderr,none": 0.01198993664066653 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7368421052631579, + "acc_stderr,none": 0.03377310252209205 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5616350177019633, + "acc_stderr,none": 0.008371099697022879 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5962264150943396, + "acc_stderr,none": 0.03019761160019795 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.4624277456647399, + "acc_stderr,none": 0.0380168510452446 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.37, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5336322869955157, + "acc_stderr,none": 0.033481800170603065 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7669902912621359, + "acc_stderr,none": 0.04185832598928315 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8205128205128205, + "acc_stderr,none": 0.02514093595033544 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.54, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.6998722860791826, + "acc_stderr,none": 0.016389249691317425 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5915032679738562, + "acc_stderr,none": 0.028146405993096358 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.39361702127659576, + "acc_stderr,none": 0.029144544781596154 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.17647058823529413, + "acc_stderr,none": 0.023157468308559366 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.463855421686747, + "acc_stderr,none": 0.03882310850890594 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6301592460188495, + "acc_stderr,none": 0.008418371310872776 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.34210526315789475, + "acc_stderr,none": 0.04462917535336936 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7727272727272727, + "acc_stderr,none": 0.029857515673386414 + }, + "harness|mmlu_high_school_government_and_politics|0": { + 
"alias": " - high_school_government_and_politics", + "acc,none": 0.7823834196891192, + "acc_stderr,none": 0.02977866303775295 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5846153846153846, + "acc_stderr,none": 0.02498535492310233 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6302521008403361, + "acc_stderr,none": 0.03135709599613591 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7761467889908257, + "acc_stderr,none": 0.017871217767790236 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6717557251908397, + "acc_stderr,none": 0.04118438565806299 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.4820261437908497, + "acc_stderr,none": 0.020214761037872408 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5818181818181818, + "acc_stderr,none": 0.04724577405731572 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.5428571428571428, + "acc_stderr,none": 0.031891418324213966 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.6865671641791045, + "acc_stderr,none": 0.032801882053486435 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.77, + "acc_stderr,none": 0.04229525846816505 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.48969235648588644, + "acc_stderr,none": 0.008718408268467998 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.42, + "acc_stderr,none": 0.04960449637488584 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.4888888888888889, + "acc_stderr,none": 0.04318275491977976 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5855263157894737, + "acc_stderr,none": 0.04008973785779206 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5694444444444444, + "acc_stderr,none": 0.04140685639111503 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237101 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.33, + "acc_stderr,none": 0.04725815626252604 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466996 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.73, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5531914893617021, + "acc_stderr,none": 0.0325005368436584 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5310344827586206, + "acc_stderr,none": 0.04158632762097828 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4523809523809524, + "acc_stderr,none": 0.025634258115554958 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + 
"acc,none": 0.6709677419354839, + "acc_stderr,none": 0.026729499068349958 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5073891625615764, + "acc_stderr,none": 0.035176035403610105 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.6, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.028742040903948496 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3509933774834437, + "acc_stderr,none": 0.03896981964257375 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4351851851851852, + "acc_stderr,none": 0.03381200005643525 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.45535714285714285, + "acc_stderr,none": 0.04726835553719099 + } + }, + "task_info": { + "model": "Qwen/Qwen1.5-7B-Chat-GGUF", + "revision": "main", + "private": false, + "params": 28.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 56.0, + "model_size": 28.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-01T16:23:43Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*q4_0.gguf" + }, + "versions": { + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + 
"harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714965259.0617445, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=Qwen/Qwen1.5-7B-Chat-GGUF,ftype=*q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git 
a/SanctumAI/results_2024-05-03-22-24-42.json b/SanctumAI/results_2024-05-03-22-24-42.json new file mode 100644 index 0000000000000000000000000000000000000000..89a93d9f73054462cec1a69bdced326efad48bc8 --- /dev/null +++ b/SanctumAI/results_2024-05-03-22-24-42.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-03-22-24-42", + "total_evaluation_time_secondes": "", + "model_name": "SanctumAI/Phi-3-mini-4k-instruct-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 2.175438336, + "model_params": 3.821079552, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|openbookqa|0": { + "acc,none": 0.338, + "acc_stderr,none": 0.02117566569520941, + "acc_norm,none": 0.438, + "acc_norm_stderr,none": 0.022210326363977417, + "alias": "openbookqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5976897032463653, + "acc_stderr,none": 0.004893617014975288, + "acc_norm,none": 0.7741485759808803, + "acc_norm_stderr,none": 0.004172872282984298, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.8047138047138047, + "acc_stderr,none": 0.008134384386937893, + "acc_norm,none": 0.7916666666666666, + "acc_norm_stderr,none": 0.008333333333333193, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5196245733788396, + "acc_stderr,none": 0.014600132075947089, + "acc_norm,none": 0.5503412969283277, + "acc_norm_stderr,none": 0.014537144444284736, + "alias": "arc_challenge" + }, + "harness|boolq|0": { + "acc,none": 0.8636085626911315, + "acc_stderr,none": 0.0060026713013815024, + "alias": "boolq" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.4186046511627907, + "acc_stderr,none": 0.01727001528447686, + "alias": "truthfulqa_mc1" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6075128202890306, + "acc_stderr,none": 0.01539728015231391, + "alias": "truthfulqa_mc2" + }, + "harness|mmlu|0": { + "acc,none": 0.655177325167355, + "acc_stderr,none": 0.003811132023917817, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.6125398512221042, + "acc_stderr,none": 0.006814771868222836 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.5476190476190477, + "acc_stderr,none": 0.044518079590553275 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.8, + "acc_stderr,none": 0.031234752377721175 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7794117647058824, + "acc_stderr,none": 0.02910225438967409 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8016877637130801, + "acc_stderr,none": 0.025955020841621105 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7768595041322314, + "acc_stderr,none": 0.03800754475228733 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7407407407407407, + "acc_stderr,none": 0.04236511258094633 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.8159509202453987, + "acc_stderr,none": 0.030446777687971716 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7109826589595376, + "acc_stderr,none": 
0.02440517393578323 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.48044692737430167, + "acc_stderr,none": 0.016709709877661995 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6977491961414791, + "acc_stderr,none": 0.02608270069539966 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7623456790123457, + "acc_stderr,none": 0.023683591837008557 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4869621903520209, + "acc_stderr,none": 0.012765893883835332 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8070175438596491, + "acc_stderr,none": 0.030267457554898465 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7006758931445124, + "acc_stderr,none": 0.007945113982147629 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7320754716981132, + "acc_stderr,none": 0.027257260322494845 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.653179190751445, + "acc_stderr,none": 0.036291466701596636 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6771300448430493, + "acc_stderr,none": 0.031381476375754995 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8446601941747572, + "acc_stderr,none": 0.035865947385739734 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8675213675213675, + "acc_stderr,none": 0.02220930907316561 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8058748403575989, + "acc_stderr,none": 0.014143970276657574 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.696078431372549, + "acc_stderr,none": 0.026336613469046637 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5354609929078015, + "acc_stderr,none": 0.029752389657427047 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6507352941176471, + "acc_stderr,none": 0.02895975519682486 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5, + "acc_stderr,none": 0.03892494720807614 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.769580760480988, + "acc_stderr,none": 0.00743221355809227 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.543859649122807, + "acc_stderr,none": 0.046854730419077895 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8080808080808081, + "acc_stderr,none": 0.028057791672989017 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8860103626943006, + "acc_stderr,none": 0.022935144053919432 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 
0.7076923076923077, + "acc_stderr,none": 0.02306043838085774 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.819327731092437, + "acc_stderr,none": 0.024991964966600756 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8660550458715597, + "acc_stderr,none": 0.014602811435592635 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7022900763358778, + "acc_stderr,none": 0.04010358942462203 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6944444444444444, + "acc_stderr,none": 0.018635594034423972 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6545454545454545, + "acc_stderr,none": 0.04554619617541054 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7387755102040816, + "acc_stderr,none": 0.028123429335142783 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8656716417910447, + "acc_stderr,none": 0.024112678240900826 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.88, + "acc_stderr,none": 0.03265986323710906 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5623215984776403, + "acc_stderr,none": 0.008412272866798855 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6370370370370371, + "acc_stderr,none": 0.04153948404742398 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.7368421052631579, + "acc_stderr,none": 0.03583496176361072 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.8125, + "acc_stderr,none": 0.032639560491693344 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.45, + "acc_stderr,none": 0.049999999999999996 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.47, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.33, + "acc_stderr,none": 0.04725815626252604 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3431372549019608, + "acc_stderr,none": 0.04724007352383889 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.74, + "acc_stderr,none": 0.04408440022768078 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.6212765957446809, + "acc_stderr,none": 0.03170995606040655 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5724137931034483, + "acc_stderr,none": 0.04122737111370333 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4894179894179894, + "acc_stderr,none": 0.02574554227604549 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.8290322580645161, + "acc_stderr,none": 0.021417242936321558 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5615763546798029, + "acc_stderr,none": 0.03491207857486518 + }, + 
"harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.337037037037037, + "acc_stderr,none": 0.028820884666253252 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.4304635761589404, + "acc_stderr,none": 0.04042809961395634 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5787037037037037, + "acc_stderr,none": 0.03367462138896078 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5, + "acc_stderr,none": 0.04745789978762494 + }, + "harness|piqa|0": { + "acc,none": 0.779651795429815, + "acc_stderr,none": 0.009670535456853164, + "acc_norm,none": 0.7682263329706203, + "acc_norm_stderr,none": 0.009845143772794029, + "alias": "piqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 5.970033653408986, + "perplexity_stderr,none": 0.17556862043175503, + "acc,none": 0.33242771201241994, + "acc_stderr,none": 0.006563112265118183, + "alias": "lambada_openai" + }, + "harness|winogrande|0": { + "acc,none": 0.7016574585635359, + "acc_stderr,none": 0.01285888501003043, + "alias": "winogrande" + } + }, + "task_info": { + "model": "SanctumAI/Phi-3-mini-4k-instruct-GGUF", + "revision": "main", + "private": false, + "params": 0.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 0, + "model_size": 0.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-01T16:17:48Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|openbookqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + 
"harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|piqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|winogrande|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714697373.5350165, 
+ "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=SanctumAI/Phi-3-mini-4k-instruct-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TechxGenus/results_2024-05-01-22-34-56.json b/TechxGenus/results_2024-05-01-22-34-56.json new file mode 100644 index 0000000000000000000000000000000000000000..471a58635c205a89a40e9b3ce65cc3e0d7191c68 --- /dev/null +++ b/TechxGenus/results_2024-05-01-22-34-56.json @@ -0,0 +1,583 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-01-22-34-56", + "total_evaluation_time_secondes": "", + "model_name": "TechxGenus/gemma-7b-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 7.17, + "model_params": 7.81, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2766217870257038, + "acc_stderr,none": 0.015659605755326916, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.7976060935799782, + "acc_stderr,none": 0.00937428968280767, + "acc_norm,none": 0.809031556039173, + "acc_norm_stderr,none": 0.00917083663701189, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.8302752293577982, + "acc_stderr,none": 0.0065656268366337065, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.316, + "acc_stderr,none": 0.020812359515855857, + "acc_norm,none": 0.454, + "acc_norm_stderr,none": 0.022288147591176945, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.40628500623097014, + "acc_stderr,none": 0.01403539875143181, + "alias": "truthfulqa_mc2" + }, + "harness|winogrande|0": { + "acc,none": 0.7458563535911602, + "acc_stderr,none": 0.01223630721970827, + "alias": "winogrande" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4974402730375427, + "acc_stderr,none": 0.014611199329843774, + "acc_norm,none": 0.514505119453925, + "acc_norm_stderr,none": 0.01460524108137005, + "alias": "arc_challenge" + }, + "harness|arc:easy|0": { + "acc,none": 0.8101851851851852, + "acc_stderr,none": 0.00804684052785223, + "acc_norm,none": 0.7992424242424242, + "acc_norm_stderr,none": 0.008219462692991503, + "alias": "arc_easy" + }, + "harness|mmlu|0": { + "acc,none": 0.6053980914399658, + "acc_stderr,none": 0.0038873124960644378, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.536663124335813, + "acc_stderr,none": 0.006716199085190922 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4126984126984127, + "acc_stderr,none": 0.04403438954768176 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7090909090909091, + "acc_stderr,none": 0.03546563019624337 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7794117647058824, + "acc_stderr,none": 0.029102254389674082 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7763713080168776, + "acc_stderr,none": 0.027123298205229966 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7603305785123967, + "acc_stderr,none": 
0.03896878985070416 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6759259259259259, + "acc_stderr,none": 0.04524596007030048 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7423312883435583, + "acc_stderr,none": 0.03436150827846917 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6820809248554913, + "acc_stderr,none": 0.025070713719153186 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6816720257234726, + "acc_stderr,none": 0.026457225067811032 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7098765432098766, + "acc_stderr,none": 0.025251173936495033 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.45045632333767927, + "acc_stderr,none": 0.012707390438502346 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8245614035087719, + "acc_stderr,none": 0.029170885500727654 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6868361763759253, + "acc_stderr,none": 0.0080630455468288 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.6, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6716981132075471, + "acc_stderr,none": 0.02890159361241178 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6473988439306358, + "acc_stderr,none": 0.036430371689585475 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6591928251121076, + "acc_stderr,none": 0.031811497470553604 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8349514563106796, + "acc_stderr,none": 0.036756688322331886 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8504273504273504, + "acc_stderr,none": 0.02336505149175372 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7982120051085568, + "acc_stderr,none": 0.014351702181636863 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.02564686309713791 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5035460992907801, + "acc_stderr,none": 0.02982674915328092 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6323529411764706, + "acc_stderr,none": 0.029289413409403192 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4879518072289157, + "acc_stderr,none": 0.038913644958358196 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6967825804354891, + "acc_stderr,none": 0.008091643946869077 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.41228070175438597, + "acc_stderr,none": 0.046306532033665956 + }, + 
"harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.02962022787479048 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8082901554404145, + "acc_stderr,none": 0.02840895362624528 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6025641025641025, + "acc_stderr,none": 0.024811920017903836 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6134453781512605, + "acc_stderr,none": 0.03163145807552378 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.818348623853211, + "acc_stderr,none": 0.016530617409266878 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7099236641221374, + "acc_stderr,none": 0.03980066246467765 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6241830065359477, + "acc_stderr,none": 0.01959402113657745 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6545454545454545, + "acc_stderr,none": 0.04554619617541054 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7061224489795919, + "acc_stderr,none": 0.029162738410249762 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7960199004975125, + "acc_stderr,none": 0.02849317624532607 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.8, + "acc_stderr,none": 0.04020151261036846 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.538534728829686, + "acc_stderr,none": 0.008589914937669538 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5703703703703704, + "acc_stderr,none": 0.04276349494376599 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6973684210526315, + "acc_stderr,none": 0.03738520676119668 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7569444444444444, + "acc_stderr,none": 0.03586879280080343 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.52, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.35, + "acc_stderr,none": 0.04793724854411019 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466996 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.6085106382978723, + "acc_stderr,none": 0.03190701242326812 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5793103448275863, + "acc_stderr,none": 0.0411391498118926 + }, + 
"harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4576719576719577, + "acc_stderr,none": 0.025658868862058325 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7580645161290323, + "acc_stderr,none": 0.024362599693031096 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.541871921182266, + "acc_stderr,none": 0.03505630140785741 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.64, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3814814814814815, + "acc_stderr,none": 0.02961671892749759 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.41721854304635764, + "acc_stderr,none": 0.0402614149763461 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 0.034076320938540516 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.42857142857142855, + "acc_stderr,none": 0.04697113923010212 + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.390937355467875, + "perplexity_stderr,none": 0.06679719420613348, + "acc,none": 0.7360760721909567, + "acc_stderr,none": 0.006140628245086325, + "alias": "lambada_openai" + }, + "harness|hellaswag|0": { + "acc,none": 0.5981876120294762, + "acc_stderr,none": 0.004892624490937232, + "acc_norm,none": 0.7983469428400717, + "acc_norm_stderr,none": 0.0040041446222330935, + "alias": "hellaswag" + } + }, + "task_info": { + "model": "TechxGenus/gemma-7b-AWQ", + "revision": "main", + "private": false, + "params": 10.44, + "architectures": "GemmaForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 20.88, + "model_size": 10.44, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-01T07:36:03Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "modules_to_not_convert": null, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + 
"harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|lambada:openai|0": 1.0, + "harness|hellaswag|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 
0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714549105.0151005, + "config": { + "model": "hf", + "model_args": "pretrained=TechxGenus/gemma-7b-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TechxGenus/results_2024-05-02-00-07-14.json b/TechxGenus/results_2024-05-02-00-07-14.json new file mode 100644 index 0000000000000000000000000000000000000000..7a7ae43257f12b6b981c47f6d53e1c18a8191f5e --- /dev/null +++ b/TechxGenus/results_2024-05-02-00-07-14.json @@ -0,0 +1,588 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-02-00-07-14", + "total_evaluation_time_secondes": "", + "model_name": "TechxGenus/gemma-7b-GPTQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 7.18, + "model_params": 7.82, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|lambada:openai|0": { + "perplexity,none": 5.932209341148522, + "perplexity_stderr,none": 0.1341079006392553, + "acc,none": 0.5761692218125364, + "acc_stderr,none": 0.006884673454916892, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.8027522935779816, + "acc_stderr,none": 0.006959680427057393, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.308, + "acc_stderr,none": 0.0206670329874661, + "acc_norm,none": 0.422, + "acc_norm_stderr,none": 0.022109039310618552, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.7834602829162133, + "acc_stderr,none": 0.00960998471438461, + "acc_norm,none": 0.794885745375408, + "acc_norm_stderr,none": 0.009420971671017915, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2729498164014688, + "acc_stderr,none": 0.015594753632006525, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 0.5710724967953283, + "acc_stderr,none": 0.003966391569739342, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.4973432518597237, + "acc_stderr,none": 0.0068655705992840856 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.38095238095238093, + "acc_stderr,none": 0.043435254289490965 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6303030303030303, + "acc_stderr,none": 0.03769430314512567 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7303921568627451, + "acc_stderr,none": 0.03114557065948678 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7046413502109705, + "acc_stderr,none": 0.029696338713422882 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.71900826446281, + "acc_stderr,none": 0.04103203830514512 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - 
jurisprudence", + "acc,none": 0.6018518518518519, + "acc_stderr,none": 0.04732332615978814 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6380368098159509, + "acc_stderr,none": 0.037757007291414416 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5751445086705202, + "acc_stderr,none": 0.026613350840261746 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6463022508038585, + "acc_stderr,none": 0.027155208103200875 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6604938271604939, + "acc_stderr,none": 0.026348564412011638 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4230769230769231, + "acc_stderr,none": 0.01261820406658839 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7953216374269005, + "acc_stderr,none": 0.030944459778533204 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6523978113936273, + "acc_stderr,none": 0.008314718279747594 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6641509433962264, + "acc_stderr,none": 0.029067220146644823 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5722543352601156, + "acc_stderr,none": 0.03772446857518026 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.37, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6636771300448431, + "acc_stderr,none": 0.031708824268455 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8349514563106796, + "acc_stderr,none": 0.036756688322331886 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7905982905982906, + "acc_stderr,none": 0.026655699653922737 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001975 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7484035759897829, + "acc_stderr,none": 0.015517322365529617 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6830065359477124, + "acc_stderr,none": 0.026643278474508755 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.46808510638297873, + "acc_stderr,none": 0.029766675075873866 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6176470588235294, + "acc_stderr,none": 0.029520095697687754 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4759036144578313, + "acc_stderr,none": 0.03887971849597264 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6626584335391615, + "acc_stderr,none": 0.008305114704313213 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.35964912280701755, + "acc_stderr,none": 0.04514496132873632 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", 
+ "acc,none": 0.7272727272727273, + "acc_stderr,none": 0.03173071239071724 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7979274611398963, + "acc_stderr,none": 0.02897908979429673 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5820512820512821, + "acc_stderr,none": 0.02500732988246122 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6302521008403361, + "acc_stderr,none": 0.03135709599613591 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7834862385321101, + "acc_stderr,none": 0.01765871059444313 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6183206106870229, + "acc_stderr,none": 0.0426073515764456 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5669934640522876, + "acc_stderr,none": 0.020045442473324227 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5727272727272728, + "acc_stderr,none": 0.04738198703545483 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6816326530612244, + "acc_stderr,none": 0.029822533793982066 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7910447761194029, + "acc_stderr,none": 0.028748298931728658 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.79, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5115762765620044, + "acc_stderr,none": 0.008616921534276603 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5481481481481482, + "acc_stderr,none": 0.04299268905480864 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6776315789473685, + "acc_stderr,none": 0.03803510248351585 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7430555555555556, + "acc_stderr,none": 0.03653946969442099 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.45, + "acc_stderr,none": 0.05 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3431372549019608, + "acc_stderr,none": 0.04724007352383889 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.6, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5617021276595745, + "acc_stderr,none": 0.03243618636108101 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5517241379310345, + "acc_stderr,none": 0.041443118108781526 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4074074074074074, + 
"acc_stderr,none": 0.025305906241590632 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7419354838709677, + "acc_stderr,none": 0.02489246917246283 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5123152709359606, + "acc_stderr,none": 0.035169204442208966 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.55, + "acc_stderr,none": 0.04999999999999999 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34814814814814815, + "acc_stderr,none": 0.029045600290616258 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.40397350993377484, + "acc_stderr,none": 0.040064856853653415 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5462962962962963, + "acc_stderr,none": 0.033953227263757976 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.38392857142857145, + "acc_stderr,none": 0.04616143075028547 + }, + "harness|winogrande|0": { + "acc,none": 0.7269139700078927, + "acc_stderr,none": 0.012522020105869457, + "alias": "winogrande" + }, + "harness|hellaswag|0": { + "acc,none": 0.5654252141007767, + "acc_stderr,none": 0.004946879874422689, + "acc_norm,none": 0.7689703246365266, + "acc_norm_stderr,none": 0.004206299481687509, + "alias": "hellaswag" + }, + "harness|arc:challenge|0": { + "acc,none": 0.48464163822525597, + "acc_stderr,none": 0.014604496129394911, + "acc_norm,none": 0.5093856655290102, + "acc_norm_stderr,none": 0.014608816322065003, + "alias": "arc_challenge" + }, + "harness|arc:easy|0": { + "acc,none": 0.7946127946127947, + "acc_stderr,none": 0.008289582587432945, + "acc_norm,none": 0.7714646464646465, + "acc_norm_stderr,none": 0.008615944722488483, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4114860662572397, + "acc_stderr,none": 0.013981988649185982, + "alias": "truthfulqa_mc2" + } + }, + "task_info": { + "model": "TechxGenus/gemma-7b-GPTQ", + "revision": "main", + "private": false, + "params": 10.452, + "architectures": "GemmaForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 20.904, + "model_size": 10.452, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-01T07:38:56Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "group_size": 128, + "is_marlin_format": false, + "model_file_base_name": null, + "model_name_or_path": null, + "quant_method": "gptq", + "static_groups": false, + "sym": true, + "true_sequential": true + }, + "versions": { + "harness|lambada:openai|0": 1.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + 
"harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|winogrande|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + 
"mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714574230.2024841, + "config": { + "model": "hf", + "model_args": "pretrained=TechxGenus/gemma-7b-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-04-27-02-47-01.json b/TheBloke/results_2024-04-27-02-47-01.json new file mode 100644 index 0000000000000000000000000000000000000000..fac6b78f0f752e53671eb169f2ffe3fdbdcea140 --- /dev/null +++ b/TheBloke/results_2024-04-27-02-47-01.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-27-02-47-01", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Llama-2-7B-Chat-GPTQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 3.9, + "model_params": 6.54, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.763873775843308, + "acc_stderr,none": 0.009908965890558213, + "acc_norm,none": 0.7665941240478781, + "acc_norm_stderr,none": 0.009869247889521012, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2913096695226438, + "acc_stderr,none": 0.015905987048184828, + "alias": "truthfulqa_mc1" + }, + "harness|hellaswag|0": { + "acc,none": 0.5614419438358893, + "acc_stderr,none": 0.0049519641319213225, + "acc_norm,none": 0.7391953794064927, + "acc_norm_stderr,none": 0.004381761941552722, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.44047324022379325, + "acc_stderr,none": 0.01555687649014822, + "alias": "truthfulqa_mc2" + }, + "harness|openbookqa|0": { + "acc,none": 0.328, + "acc_stderr,none": 0.021017027165175492, + "acc_norm,none": 0.4, + "acc_norm_stderr,none": 0.021930844120728505, + "alias": "openbookqa" + }, + "harness|mmlu|0": { + "acc,none": 0.43156245549067085, + "acc_stderr,none": 0.00403805715600188, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.4074388947927736, + "acc_stderr,none": 0.0069121839021850364 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.23809523809523808, + "acc_stderr,none": 0.03809523809523812 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.5636363636363636, + "acc_stderr,none": 0.03872592983524753 + 
}, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.5882352941176471, + "acc_stderr,none": 0.0345423658538061 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.5822784810126582, + "acc_stderr,none": 0.032103530322412685 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.5537190082644629, + "acc_stderr,none": 0.0453793517794788 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 0.04830366024635331 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.5276073619631901, + "acc_stderr,none": 0.039223782906109894 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.48554913294797686, + "acc_stderr,none": 0.02690784985628254 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.4983922829581994, + "acc_stderr,none": 0.02839794490780661 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5246913580246914, + "acc_stderr,none": 0.02778680093142745 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.32920469361147325, + "acc_stderr,none": 0.0120020916669023 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.6549707602339181, + "acc_stderr,none": 0.036459813773888065 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5098165432893467, + "acc_stderr,none": 0.008732608676943622 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.4679245283018868, + "acc_stderr,none": 0.03070948699255655 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.3699421965317919, + "acc_stderr,none": 0.036812296333943194 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5381165919282511, + "acc_stderr,none": 0.03346015011973228 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.5825242718446602, + "acc_stderr,none": 0.048828405482122375 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.688034188034188, + "acc_stderr,none": 0.030351527323344937 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.6500638569604087, + "acc_stderr,none": 0.017055679797150426 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.47058823529411764, + "acc_stderr,none": 0.028580341065138286 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.3262411347517731, + "acc_stderr,none": 0.027968453043563168 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.41544117647058826, + 
"acc_stderr,none": 0.029935342707877743 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.43373493975903615, + "acc_stderr,none": 0.03858158940685515 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.4774130646733832, + "acc_stderr,none": 0.008798512511707348 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.24561403508771928, + "acc_stderr,none": 0.04049339297748141 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.5151515151515151, + "acc_stderr,none": 0.03560716516531061 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.616580310880829, + "acc_stderr,none": 0.03508984236295342 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.36923076923076925, + "acc_stderr,none": 0.02446861524147892 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.33613445378151263, + "acc_stderr,none": 0.03068473711513536 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.563302752293578, + "acc_stderr,none": 0.021264820158714195 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.5190839694656488, + "acc_stderr,none": 0.04382094705550988 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.4117647058823529, + "acc_stderr,none": 0.01991037746310593 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5181818181818182, + "acc_stderr,none": 0.04785964010794916 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.4530612244897959, + "acc_stderr,none": 0.03186785930004129 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.6716417910447762, + "acc_stderr,none": 0.033206858897443244 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.3457025055502696, + "acc_stderr,none": 0.00832133835630205 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.42962962962962964, + "acc_stderr,none": 0.04276349494376599 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.4473684210526316, + "acc_stderr,none": 0.04046336883978251 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.4513888888888889, + "acc_stderr,none": 0.04161402398403279 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.2, + "acc_stderr,none": 0.04020151261036845 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.24509803921568626, + "acc_stderr,none": 0.042801058373643945 + }, + 
"harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3872340425531915, + "acc_stderr,none": 0.03184389265339525 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.42758620689655175, + "acc_stderr,none": 0.04122737111370331 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2777777777777778, + "acc_stderr,none": 0.023068188848261117 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.47419354838709676, + "acc_stderr,none": 0.02840609505765332 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.30049261083743845, + "acc_stderr,none": 0.03225799476233485 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.24444444444444444, + "acc_stderr,none": 0.026202766534652148 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2980132450331126, + "acc_stderr,none": 0.037345356767871984 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.2222222222222222, + "acc_stderr,none": 0.028353212866863445 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.36607142857142855, + "acc_stderr,none": 0.04572372358737431 + }, + "harness|winogrande|0": { + "acc,none": 0.6511444356748224, + "acc_stderr,none": 0.013395059320137336, + "alias": "winogrande" + }, + "harness|boolq|0": { + "acc,none": 0.8012232415902141, + "acc_stderr,none": 0.00697994677614537, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.5526815329331782, + "perplexity_stderr,none": 0.09428366649260858, + "acc,none": 0.6966815447312246, + "acc_stderr,none": 0.006404402872809118, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.42406143344709896, + "acc_stderr,none": 0.014441889627464394, + "acc_norm,none": 0.42918088737201365, + "acc_norm_stderr,none": 0.014464085894870653, + "alias": "arc_challenge" + }, + "harness|arc:easy|0": { + "acc,none": 0.7264309764309764, + "acc_stderr,none": 0.009147424438490745, + "acc_norm,none": 0.6784511784511784, + "acc_norm_stderr,none": 0.00958409157564063, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "TheBloke/Llama-2-7B-Chat-GPTQ", + "revision": "main", + "private": false, + "params": 4.524, + "architectures": "LlamaForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 9.048, + "model_size": 4.524, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-26T17:24:28Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "damp_percent": 0.01, + "desc_act": false, + "sym": true, + "true_sequential": true, + "model_name_or_path": null, + "model_file_base_name": "model", + "quant_method": "gptq" + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + 
"harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|winogrande|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + 
"mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714152422.1714947, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/Llama-2-7B-Chat-GPTQ,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-04-27-08-48-07.json b/TheBloke/results_2024-04-27-08-48-07.json new file mode 100644 index 0000000000000000000000000000000000000000..6c0f8498175cfe9a34078d32b6b5ecd3d92d472e --- /dev/null +++ b/TheBloke/results_2024-04-27-08-48-07.json @@ -0,0 +1,582 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-27-08-48-07", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Llama-2-7B-Chat-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 3.89, + "model_params": 6.53, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.7562568008705114, + "acc_stderr,none": 0.010017199471500614, + "acc_norm,none": 0.7622415669205659, + "acc_norm_stderr,none": 0.009932525779525485, + "alias": "piqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.5058974442285646, + "perplexity_stderr,none": 0.09435147029917793, + "acc,none": 0.6902775082476228, + "acc_stderr,none": 0.00644184552805125, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.29008567931456547, + "acc_stderr,none": 0.015886236874209515, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.8042813455657493, + "acc_stderr,none": 0.006939251824863106, + "alias": "boolq" + }, + "harness|hellaswag|0": { + "acc,none": 0.5713005377414858, + "acc_stderr,none": 0.004938787067611794, + "acc_norm,none": 0.7488548097988449, + "acc_norm_stderr,none": 0.004327855588466402, + "alias": "hellaswag" + }, + "harness|mmlu|0": { + "acc,none": 0.4528557185586099, + "acc_stderr,none": 
0.004036188883688366, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.42422954303931987, + "acc_stderr,none": 0.006938084209855627 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.25396825396825395, + "acc_stderr,none": 0.03893259610604675 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.5757575757575758, + "acc_stderr,none": 0.038592681420702636 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.5931372549019608, + "acc_stderr,none": 0.03447891136353383 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.6371308016877637, + "acc_stderr,none": 0.031299208255302136 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.5867768595041323, + "acc_stderr,none": 0.04495087843548408 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.5462962962962963, + "acc_stderr,none": 0.04812917324536823 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.5828220858895705, + "acc_stderr,none": 0.038741028598180814 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.48554913294797686, + "acc_stderr,none": 0.026907849856282532 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5209003215434084, + "acc_stderr,none": 0.028373270961069414 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 0.027801656212323667 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.3546284224250326, + "acc_stderr,none": 0.012218576439090174 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.6608187134502924, + "acc_stderr,none": 0.03631053496488904 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.536530415191503, + "acc_stderr,none": 0.008694819121553921 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.49, + "acc_stderr,none": 0.05024183937956911 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.49433962264150944, + "acc_stderr,none": 0.03077090076385131 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.36416184971098264, + "acc_stderr,none": 0.036690724774169084 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5829596412556054, + "acc_stderr,none": 0.03309266936071722 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6699029126213593, + "acc_stderr,none": 0.04656147110012351 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.6965811965811965, + "acc_stderr,none": 0.030118210106942652 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956911 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + 
"acc,none": 0.6756066411238825, + "acc_stderr,none": 0.016740929047162702 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.49673202614379086, + "acc_stderr,none": 0.02862930519400354 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.36524822695035464, + "acc_stderr,none": 0.028723863853281278 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.43014705882352944, + "acc_stderr,none": 0.030074971917302875 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.45180722891566266, + "acc_stderr,none": 0.03874371556587953 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.5128371790705233, + "acc_stderr,none": 0.008748422821694416 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2894736842105263, + "acc_stderr,none": 0.04266339443159395 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6060606060606061, + "acc_stderr,none": 0.03481285338232962 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.6632124352331606, + "acc_stderr,none": 0.03410780251836183 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.36153846153846153, + "acc_stderr,none": 0.024359581465397 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.36134453781512604, + "acc_stderr,none": 0.031204691225150016 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.6165137614678899, + "acc_stderr,none": 0.02084715664191598 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.5343511450381679, + "acc_stderr,none": 0.04374928560599738 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.434640522875817, + "acc_stderr,none": 0.020054269200726452 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5272727272727272, + "acc_stderr,none": 0.04782001791380061 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.5224489795918368, + "acc_stderr,none": 0.03197694118713672 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.6965174129353234, + "acc_stderr,none": 0.03251006816458618 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.72, + "acc_stderr,none": 0.04512608598542128 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.35458293688550585, + "acc_stderr,none": 0.00834404825169343 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.45185185185185184, + "acc_stderr,none": 0.04299268905480864 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.4605263157894737, + "acc_stderr,none": 0.04056242252249033 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5, + "acc_stderr,none": 0.04181210050035455 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.21, + "acc_stderr,none": 
0.040936018074033256 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.33, + "acc_stderr,none": 0.04725815626252604 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.29, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.23529411764705882, + "acc_stderr,none": 0.042207736591714534 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.57, + "acc_stderr,none": 0.04975698519562428 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3702127659574468, + "acc_stderr,none": 0.031565646822367836 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.4206896551724138, + "acc_stderr,none": 0.0411391498118926 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2777777777777778, + "acc_stderr,none": 0.023068188848261117 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.5258064516129032, + "acc_stderr,none": 0.02840609505765332 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.31527093596059114, + "acc_stderr,none": 0.032690808719701876 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.27037037037037037, + "acc_stderr,none": 0.02708037281514566 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2781456953642384, + "acc_stderr,none": 0.03658603262763743 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.24074074074074073, + "acc_stderr,none": 0.02915752218460559 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.33035714285714285, + "acc_stderr,none": 0.04464285714285714 + }, + "harness|winogrande|0": { + "acc,none": 0.6471981057616417, + "acc_stderr,none": 0.013429728101788947, + "alias": "winogrande" + }, + "harness|openbookqa|0": { + "acc,none": 0.318, + "acc_stderr,none": 0.020847571620814007, + "acc_norm,none": 0.422, + "acc_norm_stderr,none": 0.022109039310618552, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4429339932483719, + "acc_stderr,none": 0.015653506812546597, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4300341296928328, + "acc_stderr,none": 0.014467631559137994, + "acc_norm,none": 0.431740614334471, + "acc_norm_stderr,none": 0.014474591427196204, + "alias": "arc_challenge" + }, + "harness|arc:easy|0": { + "acc,none": 0.726010101010101, + "acc_stderr,none": 0.009151805901544028, + "acc_norm,none": 0.6696127946127947, + "acc_norm_stderr,none": 0.009651430216428185, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "TheBloke/Llama-2-7B-Chat-AWQ", + "revision": "main", + "private": false, + "params": 4.516, + "architectures": "LlamaForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 9.032, + "model_size": 4.516, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + 
"submitted_time": "2024-04-26T17:26:23Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "quant_method": "awq", + "zero_point": true, + "group_size": 128, + "bits": 4, + "version": "gemm" + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|winogrande|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, 
+ "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714157340.7852106, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/Llama-2-7B-Chat-AWQ,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-04-27-23-05-56.json b/TheBloke/results_2024-04-27-23-05-56.json new file mode 100644 index 0000000000000000000000000000000000000000..f754248b32353aec67c604ce24ceb037e074a55e --- /dev/null +++ b/TheBloke/results_2024-04-27-23-05-56.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-27-23-05-56", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Llama-2-7B-Chat-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 3.825065984, + "model_params": 6.738415616, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc1|0": { + "acc,none": 0.29865361077111385, + "acc_stderr,none": 0.016021570613768542, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.835081603467283, + "perplexity_stderr,none": 0.15195496196061345, + "acc,none": 0.4865127110421114, + "acc_stderr,none": 0.006963442876327696, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.42662116040955633, + "acc_stderr,none": 0.014453185592920293, + "acc_norm,none": 0.4325938566552901, + "acc_norm_stderr,none": 0.014478005694182526, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.31, 
+ "acc_stderr,none": 0.02070404102172479, + "acc_norm,none": 0.434, + "acc_norm_stderr,none": 0.022187215803029008, + "alias": "openbookqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5728938458474407, + "acc_stderr,none": 0.004936470085238477, + "acc_norm,none": 0.7499502091216889, + "acc_norm_stderr,none": 0.004321564303822491, + "alias": "hellaswag" + }, + "harness|boolq|0": { + "acc,none": 0.8024464831804281, + "acc_stderr,none": 0.006963746631628736, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.4325594644637516, + "acc_stderr,none": 0.00404401665456216, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.3895855472901169, + "acc_stderr,none": 0.006936149839962311 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.20634920634920634, + "acc_stderr,none": 0.03619604524124252 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.3878787878787879, + "acc_stderr,none": 0.0380491365397101 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.5049019607843137, + "acc_stderr,none": 0.03509143375606787 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.42616033755274263, + "acc_stderr,none": 0.03219035703131774 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.5371900826446281, + "acc_stderr,none": 0.045517111961042175 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.49074074074074076, + "acc_stderr,none": 0.04832853553437055 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.4723926380368098, + "acc_stderr,none": 0.0392237829061099 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.4884393063583815, + "acc_stderr,none": 0.026911898686377906 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5080385852090032, + "acc_stderr,none": 0.028394421370984545 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5061728395061729, + "acc_stderr,none": 0.027818623962583295 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.34419817470664926, + "acc_stderr,none": 0.012134433741002574 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.631578947368421, + "acc_stderr,none": 0.03699658017656878 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5236562600579338, + "acc_stderr,none": 0.008731196155986961 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5132075471698113, + "acc_stderr,none": 0.030762134874500482 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.3872832369942196, + "acc_stderr,none": 0.037143259063020656 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 
0.5739910313901345, + "acc_stderr,none": 0.033188332862172806 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6116504854368932, + "acc_stderr,none": 0.0482572933735639 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7051282051282052, + "acc_stderr,none": 0.029872577708891172 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.46, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.648786717752235, + "acc_stderr,none": 0.01706998205149943 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5032679738562091, + "acc_stderr,none": 0.028629305194003543 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.35815602836879434, + "acc_stderr,none": 0.028602085862759415 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.39338235294117646, + "acc_stderr,none": 0.029674288281311183 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4397590361445783, + "acc_stderr,none": 0.03864139923699122 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.49496262593435164, + "acc_stderr,none": 0.008805335177966452 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.044346007015849245 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.5707070707070707, + "acc_stderr,none": 0.03526552724601199 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.6373056994818653, + "acc_stderr,none": 0.034697137917043715 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.382051282051282, + "acc_stderr,none": 0.02463554916390823 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.3277310924369748, + "acc_stderr,none": 0.030489911417673227 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.5798165137614679, + "acc_stderr,none": 0.0211624200482735 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.549618320610687, + "acc_stderr,none": 0.04363643698524779 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.4215686274509804, + "acc_stderr,none": 0.019977422600227474 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5181818181818182, + "acc_stderr,none": 0.04785964010794916 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.46938775510204084, + "acc_stderr,none": 0.031949171367580624 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.6517412935323383, + "acc_stderr,none": 0.03368787466115459 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.73, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.3460196638122423, + "acc_stderr,none": 0.008328893618355438 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.26, + 
"acc_stderr,none": 0.04408440022768077 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.4444444444444444, + "acc_stderr,none": 0.04292596718256981 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.4276315789473684, + "acc_stderr,none": 0.04026097083296559 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.4652777777777778, + "acc_stderr,none": 0.04171115858181618 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.22549019607843138, + "acc_stderr,none": 0.041583075330832865 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.55, + "acc_stderr,none": 0.049999999999999996 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3659574468085106, + "acc_stderr,none": 0.03148955829745529 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.42758620689655175, + "acc_stderr,none": 0.041227371113703316 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2751322751322751, + "acc_stderr,none": 0.023000086859068646 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.4838709677419355, + "acc_stderr,none": 0.028429203176724555 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.3103448275862069, + "acc_stderr,none": 0.032550867699701024 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.27037037037037037, + "acc_stderr,none": 0.02708037281514566 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.271523178807947, + "acc_stderr,none": 0.036313298039696525 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.22685185185185186, + "acc_stderr,none": 0.02856165010242226 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.38392857142857145, + "acc_stderr,none": 0.04616143075028547 + }, + "harness|arc:easy|0": { + "acc,none": 0.7281144781144782, + "acc_stderr,none": 0.009129795867310492, + "acc_norm,none": 0.6784511784511784, + "acc_norm_stderr,none": 0.009584091575640625, + "alias": "arc_easy" + }, + "harness|winogrande|0": { + "acc,none": 0.6779794790844514, + "acc_stderr,none": 0.013132070202071081, + "alias": "winogrande" + }, + "harness|piqa|0": { + "acc,none": 0.749183895538629, + "acc_stderr,none": 0.010113869547069044, + "acc_norm,none": 0.7568008705114254, + "acc_norm_stderr,none": 0.010009611953858934, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4434567291361564, + "acc_stderr,none": 0.015648678178541143, + "alias": "truthfulqa_mc2" + } + }, + "task_info": { 
+ "model": "TheBloke/Llama-2-7B-Chat-GGUF", + "revision": "main", + "private": false, + "params": 28.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 56.0, + "model_size": 28.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-26T17:46:46Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0 + }, 
+ "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714179285.517894, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=TheBloke/Llama-2-7B-Chat-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-04-29-01-54-05.json b/TheBloke/results_2024-04-29-01-54-05.json new file mode 100644 index 0000000000000000000000000000000000000000..bb886c6ef6e0df1d68a2cc2a22bbd852b3bc0a6e --- /dev/null +++ b/TheBloke/results_2024-04-29-01-54-05.json @@ -0,0 +1,583 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-29-01-54-05", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Mistral-7B-Instruct-v0.2-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.15, + "model_params": 7.03, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 0.670141078497724, + "acc_stderr,none": 0.01521788784865362, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5091799265605875, + "acc_stderr,none": 0.017500550724819753, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 
0.5812562313060817, + "acc_stderr,none": 0.003955803368572514, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5298618490967056, + "acc_stderr,none": 0.006858786328047652 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3492063492063492, + "acc_stderr,none": 0.04263906892795132 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7333333333333333, + "acc_stderr,none": 0.03453131801885417 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7696078431372549, + "acc_stderr,none": 0.029554292605695053 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7679324894514767, + "acc_stderr,none": 0.02747974455080852 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.743801652892562, + "acc_stderr,none": 0.03984979653302872 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7037037037037037, + "acc_stderr,none": 0.04414343666854933 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7423312883435583, + "acc_stderr,none": 0.03436150827846917 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6502890173410405, + "acc_stderr,none": 0.025674281456531025 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.33519553072625696, + "acc_stderr,none": 0.015788007190185884 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.639871382636656, + "acc_stderr,none": 0.027264297599804012 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6574074074074074, + "acc_stderr,none": 0.026406145973625655 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4106910039113429, + "acc_stderr,none": 0.012564871542534353 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7894736842105263, + "acc_stderr,none": 0.031267817146631786 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.652075957515288, + "acc_stderr,none": 0.008252496734366984 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.63, + "acc_stderr,none": 0.04852365870939098 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6566037735849056, + "acc_stderr,none": 0.02922452646912479 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5549132947976878, + "acc_stderr,none": 0.03789401760283647 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5919282511210763, + "acc_stderr,none": 0.03298574607842821 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7184466019417476, + "acc_stderr,none": 0.04453254836326466 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8675213675213675, + "acc_stderr,none": 0.022209309073165616 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.62, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_miscellaneous|0": { + "alias": 
" - miscellaneous", + "acc,none": 0.7752234993614304, + "acc_stderr,none": 0.014927447101937157 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.02699254433929723 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.46099290780141844, + "acc_stderr,none": 0.02973659252642444 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6139705882352942, + "acc_stderr,none": 0.029573269134411127 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4578313253012048, + "acc_stderr,none": 0.0387862677100236 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6733831654208645, + "acc_stderr,none": 0.008218778690022905 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.38596491228070173, + "acc_stderr,none": 0.045796394220704334 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7121212121212122, + "acc_stderr,none": 0.03225883512300993 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7979274611398963, + "acc_stderr,none": 0.028979089794296732 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5538461538461539, + "acc_stderr,none": 0.02520357177302833 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6512605042016807, + "acc_stderr,none": 0.030956636328566548 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7889908256880734, + "acc_stderr,none": 0.01749392240411265 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6870229007633588, + "acc_stderr,none": 0.04066962905677697 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5784313725490197, + "acc_stderr,none": 0.01997742260022747 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6545454545454545, + "acc_stderr,none": 0.04554619617541054 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6653061224489796, + "acc_stderr,none": 0.030209235226242307 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.835820895522388, + "acc_stderr,none": 0.02619392354445413 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.85, + "acc_stderr,none": 0.03588702812826371 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.49825562955915004, + "acc_stderr,none": 0.008669172503365446 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.29, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5925925925925926, + "acc_stderr,none": 0.042446332383532286 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.625, + "acc_stderr,none": 0.039397364351956274 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6736111111111112, + "acc_stderr,none": 0.03921067198982266 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 
0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.59, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466996 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.48936170212765956, + "acc_stderr,none": 0.03267862331014063 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5172413793103449, + "acc_stderr,none": 0.04164188720169375 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.41534391534391535, + "acc_stderr,none": 0.02537952491077841 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6903225806451613, + "acc_stderr,none": 0.026302774983517418 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5024630541871922, + "acc_stderr,none": 0.035179450386910616 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.6, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.028742040903948496 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3576158940397351, + "acc_stderr,none": 0.03913453431177258 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4675925925925926, + "acc_stderr,none": 0.03402801581358966 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5, + "acc_stderr,none": 0.04745789978762494 + }, + "harness|hellaswag|0": { + "acc,none": 0.6559450308703445, + "acc_stderr,none": 0.004740882120999965, + "acc_norm,none": 0.8328022306313483, + "acc_norm_stderr,none": 0.003723897305645456, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.342, + "acc_stderr,none": 0.02123614719989926, + "acc_norm,none": 0.454, + "acc_norm_stderr,none": 0.022288147591176945, + "alias": "openbookqa" + }, + "harness|boolq|0": { + "acc,none": 0.8510703363914373, + "acc_stderr,none": 0.006226813679382003, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.7986942328618063, + "acc_stderr,none": 0.00935543109899043, + "acc_norm,none": 0.8057671381936888, + "acc_norm_stderr,none": 0.009230209366168259, + "alias": "piqa" + }, + "harness|winogrande|0": { + "acc,none": 0.7411207576953434, + "acc_stderr,none": 0.012310515810993376, + "alias": "winogrande" + }, + "harness|arc:challenge|0": { + "acc,none": 0.537542662116041, + "acc_stderr,none": 0.01457014449507558, + "acc_norm,none": 0.5597269624573379, + "acc_norm_stderr,none": 0.014506769524804243, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.5114616007527477, + "perplexity_stderr,none": 0.07624527890354445, + "acc,none": 0.7098777411216767, + "acc_stderr,none": 0.006322580641394919, + 
"alias": "lambada_openai" + }, + "harness|arc:easy|0": { + "acc,none": 0.8042929292929293, + "acc_stderr,none": 0.008141015407566888, + "acc_norm,none": 0.7571548821548821, + "acc_norm_stderr,none": 0.008798836444222035, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "TheBloke/Mistral-7B-Instruct-v0.2-AWQ", + "revision": "main", + "private": false, + "params": 4.784, + "architectures": "MistralForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 9.568, + "model_size": 4.784, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-28T08:53:22Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "modules_to_not_convert": [], + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + 
"harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714303185.1043832, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/Mistral-7B-Instruct-v0.2-AWQ,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-04-29-22-05-21.json b/TheBloke/results_2024-04-29-22-05-21.json new file mode 100644 index 0000000000000000000000000000000000000000..55289a601c66e011f674b156f5354f256e302563 --- /dev/null +++ b/TheBloke/results_2024-04-29-22-05-21.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-29-22-05-21", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.108181504, + "model_params": 7.241732096, + 
"quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.734017363851618, + "acc_stderr,none": 0.012418323153051046, + "alias": "winogrande" + }, + "harness|piqa|0": { + "acc,none": 0.7758433079434167, + "acc_stderr,none": 0.009729897956410032, + "acc_norm,none": 0.7845484221980413, + "acc_norm_stderr,none": 0.00959246311565809, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6784488430446446, + "acc_stderr,none": 0.015160742121748417, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.668990240987851, + "acc_stderr,none": 0.004696148339570966, + "acc_norm,none": 0.8399721171081458, + "acc_norm_stderr,none": 0.0036588262081015126, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.388, + "acc_stderr,none": 0.021814300984787635, + "acc_norm,none": 0.476, + "acc_norm_stderr,none": 0.0223572738810164, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5312117503059975, + "acc_stderr,none": 0.017469364874577547, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.439210478325373, + "perplexity_stderr,none": 0.10946463394642139, + "acc,none": 0.5010673394139337, + "acc_stderr,none": 0.006965961785703057, + "alias": "lambada_openai" + }, + "harness|arc:easy|0": { + "acc,none": 0.7790404040404041, + "acc_stderr,none": 0.00851343094701945, + "acc_norm,none": 0.7058080808080808, + "acc_norm_stderr,none": 0.009350328648861737, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.854434250764526, + "acc_stderr,none": 0.00616824285431075, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.5844609030052699, + "acc_stderr,none": 0.003963750018814539, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5430393198724761, + "acc_stderr,none": 0.0068896056546608395 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3888888888888889, + "acc_stderr,none": 0.04360314860077459 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7151515151515152, + "acc_stderr,none": 0.03524390844511781 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7843137254901961, + "acc_stderr,none": 0.028867431449849303 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7468354430379747, + "acc_stderr,none": 0.028304657943035293 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7355371900826446, + "acc_stderr,none": 0.040261875275912046 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7037037037037037, + "acc_stderr,none": 0.04414343666854933 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7423312883435583, + "acc_stderr,none": 0.03436150827846917 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6589595375722543, + "acc_stderr,none": 0.025522474632121615 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.35195530726256985, + "acc_stderr,none": 0.015972668523689063 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6559485530546624, + "acc_stderr,none": 0.026981478043648033 + }, + "harness|mmlu_prehistory|0": { + "alias": " - 
prehistory", + "acc,none": 0.6635802469135802, + "acc_stderr,none": 0.026289734945952922 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4335071707953064, + "acc_stderr,none": 0.012656810383983967 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8070175438596491, + "acc_stderr,none": 0.030267457554898465 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6530415191503057, + "acc_stderr,none": 0.008275074231324661 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6452830188679245, + "acc_stderr,none": 0.02944517532819958 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5433526011560693, + "acc_stderr,none": 0.03798106566014498 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6233183856502242, + "acc_stderr,none": 0.032521134899291884 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7378640776699029, + "acc_stderr,none": 0.04354631077260595 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8504273504273504, + "acc_stderr,none": 0.02336505149175372 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001975 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.768837803320562, + "acc_stderr,none": 0.015075523238101086 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6405228758169934, + "acc_stderr,none": 0.027475969910660952 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.48226950354609927, + "acc_stderr,none": 0.02980873964223777 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6617647058823529, + "acc_stderr,none": 0.028739328513983572 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4759036144578313, + "acc_stderr,none": 0.03887971849597264 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6746831329216769, + "acc_stderr,none": 0.008223038429253208 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4824561403508772, + "acc_stderr,none": 0.04700708033551038 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7373737373737373, + "acc_stderr,none": 0.03135305009533084 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7979274611398963, + "acc_stderr,none": 0.028979089794296732 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5461538461538461, + "acc_stderr,none": 0.025242770987126184 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6134453781512605, + "acc_stderr,none": 0.03163145807552378 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7834862385321101, + 
"acc_stderr,none": 0.017658710594443152 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6793893129770993, + "acc_stderr,none": 0.04093329229834278 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5686274509803921, + "acc_stderr,none": 0.02003639376835264 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.7, + "acc_stderr,none": 0.04389311454644287 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6775510204081633, + "acc_stderr,none": 0.029923100563683903 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8407960199004975, + "acc_stderr,none": 0.02587064676616913 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.86, + "acc_stderr,none": 0.03487350880197768 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4906438312718046, + "acc_stderr,none": 0.008661995829603667 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847415 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5407407407407407, + "acc_stderr,none": 0.04304979692464242 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.625, + "acc_stderr,none": 0.039397364351956274 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6319444444444444, + "acc_stderr,none": 0.04032999053960719 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.35, + "acc_stderr,none": 0.047937248544110175 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.35294117647058826, + "acc_stderr,none": 0.047551296160629475 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252609 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5106382978723404, + "acc_stderr,none": 0.03267862331014063 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5517241379310345, + "acc_stderr,none": 0.04144311810878152 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4074074074074074, + "acc_stderr,none": 0.025305906241590636 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6903225806451613, + "acc_stderr,none": 0.026302774983517418 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4729064039408867, + "acc_stderr,none": 0.035128190778761066 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.028742040903948492 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - 
high_school_physics", + "acc,none": 0.3708609271523179, + "acc_stderr,none": 0.03943966699183629 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4675925925925926, + "acc_stderr,none": 0.03402801581358966 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5089285714285714, + "acc_stderr,none": 0.04745033255489123 + }, + "harness|arc:challenge|0": { + "acc,none": 0.5349829351535836, + "acc_stderr,none": 0.014575583922019663, + "acc_norm,none": 0.5503412969283277, + "acc_norm_stderr,none": 0.014537144444284736, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", + "revision": "main", + "private": false, + "params": 28.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 56.0, + "model_size": 28.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-28T09:07:10Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + 
"harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714355029.9857757, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=TheBloke/Mistral-7B-Instruct-v0.2-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-05-02-16-52-29.json b/TheBloke/results_2024-05-02-16-52-29.json new file mode 100644 index 0000000000000000000000000000000000000000..d75113542d4218362b09a045967681d7f3e9bbf2 --- /dev/null +++ b/TheBloke/results_2024-05-02-16-52-29.json @@ -0,0 +1,582 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + 
"override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-02-16-52-29", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.96, + "model_params": 10.55, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|arc:challenge|0": { + "acc,none": 0.5981228668941979, + "acc_stderr,none": 0.014327268614578274, + "acc_norm,none": 0.6322525597269625, + "acc_norm_stderr,none": 0.014090995618168473, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.374, + "acc_stderr,none": 0.021660710347204487, + "acc_norm,none": 0.494, + "acc_norm_stderr,none": 0.022381462412439324, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5471236230110159, + "acc_stderr,none": 0.01742558984831402, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.8112078346028292, + "acc_stderr,none": 0.009130687388952816, + "acc_norm,none": 0.8144722524483133, + "acc_norm_stderr,none": 0.009069597302603996, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.8798165137614679, + "acc_stderr,none": 0.005687363587870172, + "alias": "boolq" + }, + "harness|arc:easy|0": { + "acc,none": 0.8308080808080808, + "acc_stderr,none": 0.007693223639488826, + "acc_norm,none": 0.8101851851851852, + "acc_norm_stderr,none": 0.008046840527852234, + "alias": "arc_easy" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.185362930040927, + "perplexity_stderr,none": 0.07406409479719334, + "acc,none": 0.7279254803027363, + "acc_stderr,none": 0.006200111064998447, + "alias": "lambada_openai" + }, + "harness|mmlu|0": { + "acc,none": 0.6232730380287709, + "acc_stderr,none": 0.0038533964574598407, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5727948990435706, + "acc_stderr,none": 0.006732570609347105 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.373015873015873, + "acc_stderr,none": 0.04325506042017086 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.806060606060606, + "acc_stderr,none": 0.030874145136562097 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8333333333333334, + "acc_stderr,none": 0.026156867523931055 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8270042194092827, + "acc_stderr,none": 0.024621562866768445 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8264462809917356, + "acc_stderr,none": 0.0345727283691767 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.04077494709252626 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6993865030674846, + "acc_stderr,none": 0.03602511318806771 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7023121387283237, + "acc_stderr,none": 0.024617055388676982 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3307262569832402, + "acc_stderr,none": 0.01573502625896612 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6945337620578779, + "acc_stderr,none": 0.026160584450140453 + }, + 
"harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7438271604938271, + "acc_stderr,none": 0.0242885336377261 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.47392438070404175, + "acc_stderr,none": 0.012752858346533134 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7543859649122807, + "acc_stderr,none": 0.033014059469872487 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7016414547795301, + "acc_stderr,none": 0.007934723097613417 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6867924528301886, + "acc_stderr,none": 0.028544793319055326 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6473988439306358, + "acc_stderr,none": 0.036430371689585475 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.695067264573991, + "acc_stderr,none": 0.030898610882477515 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8155339805825242, + "acc_stderr,none": 0.03840423627288276 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8547008547008547, + "acc_stderr,none": 0.023086635086841407 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.73, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.80970625798212, + "acc_stderr,none": 0.01403694585038138 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7156862745098039, + "acc_stderr,none": 0.025829163272757468 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5035460992907801, + "acc_stderr,none": 0.02982674915328092 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7022058823529411, + "acc_stderr,none": 0.027778298701545443 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5180722891566265, + "acc_stderr,none": 0.038899512528272166 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7237569060773481, + "acc_stderr,none": 0.007875459074235897 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.47368421052631576, + "acc_stderr,none": 0.046970851366478626 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.803030303030303, + "acc_stderr,none": 0.02833560973246336 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8808290155440415, + "acc_stderr,none": 0.02338193534812142 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6358974358974359, + "acc_stderr,none": 0.024396672985094767 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6764705882352942, + "acc_stderr,none": 0.03038835355188678 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - 
high_school_psychology", + "acc,none": 0.8201834862385321, + "acc_stderr,none": 0.016465345467391534 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7480916030534351, + "acc_stderr,none": 0.038073871163060866 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6519607843137255, + "acc_stderr,none": 0.01927099870822398 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425465 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6938775510204082, + "acc_stderr,none": 0.02950489645459596 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8208955223880597, + "acc_stderr,none": 0.027113286753111837 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.87, + "acc_stderr,none": 0.03379976689896309 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5233111322549953, + "acc_stderr,none": 0.008565620869325358 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5851851851851851, + "acc_stderr,none": 0.04256193767901408 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.7039473684210527, + "acc_stderr,none": 0.037150621549989056 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7291666666666666, + "acc_stderr,none": 0.037161774375660164 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.5, + "acc_stderr,none": 0.050251890762960605 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.29, + "acc_stderr,none": 0.04560480215720684 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.38235294117647056, + "acc_stderr,none": 0.04835503696107224 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5617021276595745, + "acc_stderr,none": 0.03243618636108101 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5379310344827586, + "acc_stderr,none": 0.04154659671707548 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4470899470899471, + "acc_stderr,none": 0.025606723995777025 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7677419354838709, + "acc_stderr,none": 0.02402225613030824 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.458128078817734, + "acc_stderr,none": 0.03505630140785741 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.62, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.362962962962963, + "acc_stderr,none": 
0.02931820364520686 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3509933774834437, + "acc_stderr,none": 0.03896981964257374 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5370370370370371, + "acc_stderr,none": 0.03400603625538271 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5, + "acc_stderr,none": 0.04745789978762494 + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.7003691298306434, + "acc_stderr,none": 0.015084794104413395, + "alias": "truthfulqa_mc2" + }, + "harness|winogrande|0": { + "acc,none": 0.7458563535911602, + "acc_stderr,none": 0.012236307219708278, + "alias": "winogrande" + }, + "harness|hellaswag|0": { + "acc,none": 0.6806413065126469, + "acc_stderr,none": 0.004652753439460115, + "acc_norm,none": 0.8593905596494722, + "acc_norm_stderr,none": 0.0034690778470563856, + "alias": "hellaswag" + } + }, + "task_info": { + "model": "TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ", + "revision": "main", + "private": false, + "params": 6.652, + "architectures": "LlamaForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 13.304, + "model_size": 6.652, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-01T16:10:00Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + 
"harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|hellaswag|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714605116.6428869, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file 
diff --git a/TheBloke/results_2024-05-03-08-18-06.json b/TheBloke/results_2024-05-03-08-18-06.json new file mode 100644 index 0000000000000000000000000000000000000000..b28ce81706222f84fc1507187c688990af203f8c --- /dev/null +++ b/TheBloke/results_2024-05-03-08-18-06.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-03-08-18-06", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 6.071640064, + "model_params": 10.731524096, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.823177366702938, + "acc_stderr,none": 0.008901456201658638, + "acc_norm,none": 0.8264417845484222, + "acc_norm_stderr,none": 0.008836375101386918, + "alias": "piqa" + }, + "harness|winogrande|0": { + "acc,none": 0.7592738752959748, + "acc_stderr,none": 0.012015559212224188, + "alias": "winogrande" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.9791798149200446, + "perplexity_stderr,none": 0.09951611112279504, + "acc,none": 0.524160683097225, + "acc_stderr,none": 0.006957840284118759, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.372, + "acc_stderr,none": 0.0216371979857224, + "acc_norm,none": 0.49, + "acc_norm_stderr,none": 0.02237859698923078, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5630354957160343, + "acc_stderr,none": 0.017363844503195974, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 0.6204244409628258, + "acc_stderr,none": 0.0038773551606319846, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5615302869287991, + "acc_stderr,none": 0.006910974026687457 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4365079365079365, + "acc_stderr,none": 0.04435932892851466 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6060606060606061, + "acc_stderr,none": 0.0381549430868893 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.6127450980392157, + "acc_stderr,none": 0.03418931233833344 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.6919831223628692, + "acc_stderr,none": 0.030052389335605695 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8016528925619835, + "acc_stderr,none": 0.03640118271990947 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.04077494709252626 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7361963190184049, + "acc_stderr,none": 0.03462419931615624 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7196531791907514, + "acc_stderr,none": 0.024182427496577605 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.358659217877095, + "acc_stderr,none": 0.016040454426164478 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6784565916398714, + "acc_stderr,none": 0.026527724079528872 + }, + 
"harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7469135802469136, + "acc_stderr,none": 0.024191808600713002 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4810951760104302, + "acc_stderr,none": 0.012761104871472652 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8011695906432749, + "acc_stderr,none": 0.03061111655743253 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7064692629546186, + "acc_stderr,none": 0.007895000819957698 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.65, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6867924528301886, + "acc_stderr,none": 0.02854479331905533 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6473988439306358, + "acc_stderr,none": 0.036430371689585475 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.04793724854411019 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.7040358744394619, + "acc_stderr,none": 0.0306365913486998 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7864077669902912, + "acc_stderr,none": 0.040580420156460344 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8760683760683761, + "acc_stderr,none": 0.021586494001281382 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.73, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8199233716475096, + "acc_stderr,none": 0.013740797258579823 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7091503267973857, + "acc_stderr,none": 0.02600480036395213 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.524822695035461, + "acc_stderr,none": 0.02979071924382972 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6911764705882353, + "acc_stderr,none": 0.028064998167040094 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5421686746987951, + "acc_stderr,none": 0.038786267710023595 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7299317517062074, + "acc_stderr,none": 0.007807463005859416 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.5087719298245614, + "acc_stderr,none": 0.047028804320496165 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.797979797979798, + "acc_stderr,none": 0.028606204289229893 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8652849740932642, + "acc_stderr,none": 0.024639789097709443 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6538461538461539, + "acc_stderr,none": 0.024121125416941187 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7058823529411765, + "acc_stderr,none": 0.02959732973097809 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - 
high_school_psychology", + "acc,none": 0.8422018348623853, + "acc_stderr,none": 0.015630022970092455 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7175572519083969, + "acc_stderr,none": 0.03948406125768361 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6339869281045751, + "acc_stderr,none": 0.019488025745529665 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6636363636363637, + "acc_stderr,none": 0.04525393596302505 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6775510204081633, + "acc_stderr,none": 0.029923100563683903 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.845771144278607, + "acc_stderr,none": 0.025538433368578348 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.9, + "acc_stderr,none": 0.03015113445777634 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5166508087535681, + "acc_stderr,none": 0.008558572662836687 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.35, + "acc_stderr,none": 0.04793724854411021 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5703703703703704, + "acc_stderr,none": 0.04276349494376599 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.7236842105263158, + "acc_stderr,none": 0.03639057569952929 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.037455547914624576 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466996 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.04560480215720684 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5659574468085107, + "acc_stderr,none": 0.03240038086792747 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5379310344827586, + "acc_stderr,none": 0.04154659671707548 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4312169312169312, + "acc_stderr,none": 0.0255064816981382 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7709677419354839, + "acc_stderr,none": 0.023904914311782644 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4433497536945813, + "acc_stderr,none": 0.03495334582162933 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3592592592592593, + "acc_stderr,none": 
0.029252905927251972 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.31125827814569534, + "acc_stderr,none": 0.03780445850526733 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.49537037037037035, + "acc_stderr,none": 0.03409825519163572 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5089285714285714, + "acc_stderr,none": 0.04745033255489123 + }, + "harness|boolq|0": { + "acc,none": 0.882874617737003, + "acc_stderr,none": 0.005624288190378989, + "alias": "boolq" + }, + "harness|hellaswag|0": { + "acc,none": 0.6772555267874926, + "acc_stderr,none": 0.004665704208339031, + "acc_norm,none": 0.8599880501892053, + "acc_norm_stderr,none": 0.0034629026011362076, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.8337542087542088, + "acc_stderr,none": 0.00763945790688671, + "acc_norm,none": 0.8173400673400674, + "acc_norm_stderr,none": 0.00792850371920913, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.7113200322582366, + "acc_stderr,none": 0.014928830439186688, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.6040955631399317, + "acc_stderr,none": 0.01429122839353659, + "acc_norm,none": 0.6245733788395904, + "acc_norm_stderr,none": 0.014150631435111728, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GGUF", + "revision": "main", + "private": false, + "params": 42.8, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 85.6, + "model_size": 42.8, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-01T16:10:42Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + 
"harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|boolq|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + 
"truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714640508.2427852, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=TheBloke/SOLAR-10.7B-Instruct-v1.0-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-05-07-15-11-38.json b/TheBloke/results_2024-05-07-15-11-38.json new file mode 100644 index 0000000000000000000000000000000000000000..9b3c35598bf3b3f736ca6bec684a0b9ba7950239 --- /dev/null +++ b/TheBloke/results_2024-05-07-15-11-38.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-07-15-11-38", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GPTQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.98, + "model_params": 10.57, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.8079434167573449, + "acc_stderr,none": 0.009190740295126482, + "acc_norm,none": 0.8063112078346029, + "acc_norm_stderr,none": 0.009220384152336643, + "alias": "piqa" + }, + "harness|mmlu|0": { + "acc,none": 0.6274747187010398, + "acc_stderr,none": 0.003850177736503787, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5804463336875664, + "acc_stderr,none": 0.006792090451321374 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.42063492063492064, + "acc_stderr,none": 0.04415438226743744 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7818181818181819, + "acc_stderr,none": 0.03225078108306289 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8137254901960784, + "acc_stderr,none": 0.027325470966716323 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8059071729957806, + "acc_stderr,none": 0.02574490253229091 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.768595041322314, + "acc_stderr,none": 0.038498560987940904 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.040191074725573483 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7116564417177914, + "acc_stderr,none": 0.035590395316173425 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7138728323699421, + "acc_stderr,none": 0.02433214677913412 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3452513966480447, + "acc_stderr,none": 0.015901432608930358 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6720257234726688, + "acc_stderr,none": 0.02666441088693761 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7438271604938271, + "acc_stderr,none": 0.024288533637726095 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4973924380704042, + "acc_stderr,none": 0.012770062445433172 + }, + 
"harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7602339181286549, + "acc_stderr,none": 0.03274485211946956 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7122626327647248, + "acc_stderr,none": 0.00782546582386488 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.63, + "acc_stderr,none": 0.048523658709390974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7094339622641509, + "acc_stderr,none": 0.02794321998933713 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6589595375722543, + "acc_stderr,none": 0.036146654241808254 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6860986547085202, + "acc_stderr,none": 0.031146796482972465 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8058252427184466, + "acc_stderr,none": 0.039166677628225836 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8846153846153846, + "acc_stderr,none": 0.020930193185179333 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.74, + "acc_stderr,none": 0.04408440022768078 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8250319284802043, + "acc_stderr,none": 0.01358661921990334 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.696078431372549, + "acc_stderr,none": 0.026336613469046647 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5035460992907801, + "acc_stderr,none": 0.02982674915328092 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7463235294117647, + "acc_stderr,none": 0.02643132987078952 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.536144578313253, + "acc_stderr,none": 0.03882310850890593 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7237569060773481, + "acc_stderr,none": 0.007880951300681182 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.49122807017543857, + "acc_stderr,none": 0.04702880432049615 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.803030303030303, + "acc_stderr,none": 0.02833560973246336 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8756476683937824, + "acc_stderr,none": 0.02381447708659355 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6282051282051282, + "acc_stderr,none": 0.024503472557110932 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6680672268907563, + "acc_stderr,none": 0.030588697013783642 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8201834862385321, + "acc_stderr,none": 0.016465345467391545 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7175572519083969, + "acc_stderr,none": 0.03948406125768361 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - 
professional_psychology", + "acc,none": 0.6535947712418301, + "acc_stderr,none": 0.019249785691717203 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6454545454545455, + "acc_stderr,none": 0.04582004841505417 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7061224489795919, + "acc_stderr,none": 0.029162738410249762 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8308457711442786, + "acc_stderr,none": 0.026508590656233257 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.87, + "acc_stderr,none": 0.03379976689896309 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.520139549635268, + "acc_stderr,none": 0.008524961863333031 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6148148148148148, + "acc_stderr,none": 0.042039210401562783 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.7105263157894737, + "acc_stderr,none": 0.036906779861372814 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7430555555555556, + "acc_stderr,none": 0.03653946969442099 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.49, + "acc_stderr,none": 0.05024183937956911 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.35294117647058826, + "acc_stderr,none": 0.04755129616062946 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5872340425531914, + "acc_stderr,none": 0.03218471141400351 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5379310344827586, + "acc_stderr,none": 0.04154659671707548 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.41005291005291006, + "acc_stderr,none": 0.025331202438944437 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7677419354838709, + "acc_stderr,none": 0.02402225613030824 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4630541871921182, + "acc_stderr,none": 0.035083705204426656 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.65, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.028742040903948482 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3443708609271523, + "acc_stderr,none": 0.03879687024073327 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5416666666666666, + 
"acc_stderr,none": 0.03398110890294636 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.48214285714285715, + "acc_stderr,none": 0.047427623612430116 + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.1608315965568927, + "perplexity_stderr,none": 0.07289151576789415, + "acc,none": 0.7294779739957307, + "acc_stderr,none": 0.006188985712381461, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.7074062163257564, + "acc_stderr,none": 0.015118268786707145, + "alias": "truthfulqa_mc2" + }, + "harness|openbookqa|0": { + "acc,none": 0.352, + "acc_stderr,none": 0.021380042385946044, + "acc_norm,none": 0.478, + "acc_norm_stderr,none": 0.022361396739207867, + "alias": "openbookqa" + }, + "harness|boolq|0": { + "acc,none": 0.8788990825688073, + "acc_stderr,none": 0.005706052483368355, + "alias": "boolq" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5618115055079559, + "acc_stderr,none": 0.01736923616440442, + "alias": "truthfulqa_mc1" + }, + "harness|hellaswag|0": { + "acc,none": 0.676458872734515, + "acc_stderr,none": 0.004668710689192406, + "acc_norm,none": 0.858195578570006, + "acc_norm_stderr,none": 0.003481364840771094, + "alias": "hellaswag" + }, + "harness|arc:challenge|0": { + "acc,none": 0.6092150170648464, + "acc_stderr,none": 0.014258563880513778, + "acc_norm,none": 0.6322525597269625, + "acc_norm_stderr,none": 0.014090995618168478, + "alias": "arc_challenge" + }, + "harness|arc:easy|0": { + "acc,none": 0.8333333333333334, + "acc_stderr,none": 0.007647191129018639, + "acc_norm,none": 0.8080808080808081, + "acc_norm_stderr,none": 0.00808080808080797, + "alias": "arc_easy" + }, + "harness|winogrande|0": { + "acc,none": 0.7537490134175217, + "acc_stderr,none": 0.012108365307437509, + "alias": "winogrande" + } + }, + "task_info": { + "model": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GPTQ", + "revision": "main", + "private": false, + "params": 6.668, + "architectures": "LlamaForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 13.336, + "model_size": 6.668, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-07T04:22:25Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "damp_percent": 0.1, + "desc_act": true, + "sym": true, + "true_sequential": true, + "model_name_or_path": null, + "model_file_base_name": "model", + "quant_method": "gptq" + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + 
"harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 
0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715055897.2560651, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/SOLAR-10.7B-Instruct-v1.0-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-05-07-21-25-07.json b/TheBloke/results_2024-05-07-21-25-07.json new file mode 100644 index 0000000000000000000000000000000000000000..ded24ffb1114565efef0db388b97cface740e85d --- /dev/null +++ b/TheBloke/results_2024-05-07-21-25-07.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-07-21-25-07", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/phi-2-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 1.60065536, + "model_params": 2.77968384, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.49790878311093406, + "acc_stderr,none": 0.004989737768749929, + "acc_norm,none": 0.6547500497908784, + "acc_norm_stderr,none": 0.004744780201276645, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.4292929292929293, + "acc_stderr,none": 0.010156678075911101, + "acc_norm,none": 0.4385521885521885, + "acc_norm_stderr,none": 0.010182010275471116, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.3782874617737003, + "acc_stderr,none": 0.008482001133931, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.22945449366187154, + "acc_stderr,none": 0.0035426239458926224, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.24208289054197663, + "acc_stderr,none": 0.0062426684031394305 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2857142857142857, + "acc_stderr,none": 0.04040610178208841 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.21818181818181817, + "acc_stderr,none": 0.03225078108306289 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.25, + "acc_stderr,none": 0.03039153369274154 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.270042194092827, + "acc_stderr,none": 0.028900721906293426 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.2396694214876033, + "acc_stderr,none": 0.03896878985070417 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.25925925925925924, + "acc_stderr,none": 0.04236511258094634 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - 
logical_fallacies", + "acc,none": 0.22085889570552147, + "acc_stderr,none": 0.032591773927421776 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.24855491329479767, + "acc_stderr,none": 0.023267528432100174 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574885 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.1864951768488746, + "acc_stderr,none": 0.02212243977248077 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.21604938271604937, + "acc_stderr,none": 0.022899162918445813 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.2457627118644068, + "acc_stderr,none": 0.01099615663514269 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.3216374269005848, + "acc_stderr,none": 0.03582529442573122 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.23978113936272932, + "acc_stderr,none": 0.00764225029165751 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.21509433962264152, + "acc_stderr,none": 0.025288394502891377 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.20809248554913296, + "acc_stderr,none": 0.030952890217749884 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.18, + "acc_stderr,none": 0.038612291966536955 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.31390134529147984, + "acc_stderr,none": 0.03114679648297246 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.17475728155339806, + "acc_stderr,none": 0.03760178006026621 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.2905982905982906, + "acc_stderr,none": 0.029745048572674057 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.23754789272030652, + "acc_stderr,none": 0.015218733046150195 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.22549019607843138, + "acc_stderr,none": 0.023929155517351284 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.23404255319148937, + "acc_stderr,none": 0.025257861359432407 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.18382352941176472, + "acc_stderr,none": 0.02352924218519311 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.28313253012048195, + "acc_stderr,none": 0.03507295431370518 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.2170945726356841, + "acc_stderr,none": 0.007428786285788534 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.23684210526315788, + "acc_stderr,none": 0.039994238792813386 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.17676767676767677, + "acc_stderr,none": 0.027178752639044915 + }, + "harness|mmlu_high_school_government_and_politics|0": 
{ + "alias": " - high_school_government_and_politics", + "acc,none": 0.19689119170984457, + "acc_stderr,none": 0.02869787397186069 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.20256410256410257, + "acc_stderr,none": 0.020377660970371397 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.21008403361344538, + "acc_stderr,none": 0.026461398717471874 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.1926605504587156, + "acc_stderr,none": 0.016909276884936073 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2595419847328244, + "acc_stderr,none": 0.03844876139785271 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.25, + "acc_stderr,none": 0.01751781884501444 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.21818181818181817, + "acc_stderr,none": 0.03955932861795833 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.18775510204081633, + "acc_stderr,none": 0.02500025603954622 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.24378109452736318, + "acc_stderr,none": 0.030360490154014652 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.21249603552172533, + "acc_stderr,none": 0.007271218700485502 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.22, + "acc_stderr,none": 0.04163331998932269 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.18518518518518517, + "acc_stderr,none": 0.03355677216313142 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.17763157894736842, + "acc_stderr,none": 0.031103182383123398 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2569444444444444, + "acc_stderr,none": 0.03653946969442099 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.2, + "acc_stderr,none": 0.040201512610368445 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.26, + "acc_stderr,none": 0.044084400227680794 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.21568627450980393, + "acc_stderr,none": 0.040925639582376556 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.26382978723404255, + "acc_stderr,none": 0.02880998985410298 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.2413793103448276, + "acc_stderr,none": 0.03565998174135302 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.20899470899470898, + "acc_stderr,none": 0.020940481565334835 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + 
"acc,none": 0.1774193548387097, + "acc_stderr,none": 0.021732540689329265 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.15270935960591134, + "acc_stderr,none": 0.025308904539380624 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2111111111111111, + "acc_stderr,none": 0.02488211685765508 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.1986754966887417, + "acc_stderr,none": 0.032578473844367746 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.1527777777777778, + "acc_stderr,none": 0.02453632602613422 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3125, + "acc_stderr,none": 0.043994650575715215 + }, + "harness|lambada:openai|0": { + "perplexity,none": 1.0694247819579417, + "perplexity_stderr,none": 0.01328762392218948, + "acc,none": 0.7234620609353775, + "acc_stderr,none": 0.006231567654090111, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.3583617747440273, + "acc_stderr,none": 0.014012883334859864, + "acc_norm,none": 0.3856655290102389, + "acc_norm_stderr,none": 0.014224250973257172, + "alias": "arc_challenge" + }, + "harness|piqa|0": { + "acc,none": 0.7595212187159956, + "acc_stderr,none": 0.009971345364651074, + "acc_norm,none": 0.7622415669205659, + "acc_norm_stderr,none": 0.009932525779525483, + "alias": "piqa" + }, + "harness|openbookqa|0": { + "acc,none": 0.274, + "acc_stderr,none": 0.01996610354027947, + "acc_norm,none": 0.318, + "acc_norm_stderr,none": 0.020847571620814014, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4664689437320238, + "acc_stderr,none": 0.014567932598063655, + "alias": "truthfulqa_mc2" + }, + "harness|winogrande|0": { + "acc,none": 0.7411207576953434, + "acc_stderr,none": 0.012310515810993376, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3047735618115055, + "acc_stderr,none": 0.016114124156882424, + "alias": "truthfulqa_mc1" + } + }, + "task_info": { + "model": "TheBloke/phi-2-GGUF", + "revision": "main", + "private": false, + "params": 0.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 0, + "model_size": 0.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-07T03:59:13Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + 
"harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|piqa|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + 
"mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715074986.1140783, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=TheBloke/phi-2-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-05-11-15-26-38.json b/TheBloke/results_2024-05-11-15-26-38.json new file mode 100644 index 0000000000000000000000000000000000000000..ef44e16d6b9456c3670fd947a8d6dd4db127d949 --- /dev/null +++ b/TheBloke/results_2024-05-11-15-26-38.json @@ -0,0 +1,582 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-11-15-26-38", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Llama-2-13B-chat-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 7.25, + "model_params": 12.79, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|arc:challenge|0": { + "acc,none": 0.4564846416382253, + "acc_stderr,none": 0.014555949760496439, + "acc_norm,none": 0.5025597269624573, + "acc_norm_stderr,none": 0.014611199329843784, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 2.943540165174782, + "perplexity_stderr,none": 0.07048825902928478, + "acc,none": 0.727731418591112, + "acc_stderr,none": 0.006201495026535792, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.8027522935779816, + "acc_stderr,none": 0.0069596804270573975, + "alias": "boolq" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.43653026778274867, + "acc_stderr,none": 0.01578076424843842, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.6022704640509858, + "acc_stderr,none": 0.004884287515461496, + "acc_norm,none": 0.7892850029874527, + "acc_norm_stderr,none": 0.0040698290284162586, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.7798821548821548, + "acc_stderr,none": 0.008501788774716771, + "acc_norm,none": 0.7436868686868687, + "acc_norm_stderr,none": 0.008958775997918346, + "alias": "arc_easy" + }, + "harness|piqa|0": { + "acc,none": 0.779107725788901, + "acc_stderr,none": 0.009679088048842217, + "acc_norm,none": 0.7927094668117519, + "acc_norm_stderr,none": 0.009457844699952377, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.27906976744186046, + "acc_stderr,none": 0.01570210709062791, + "alias": "truthfulqa_mc1" + }, + "harness|openbookqa|0": { + "acc,none": 
0.352, + "acc_stderr,none": 0.02138004238594605, + "acc_norm,none": 0.44, + "acc_norm_stderr,none": 0.022221331534143057, + "alias": "openbookqa" + }, + "harness|winogrande|0": { + "acc,none": 0.7111286503551697, + "acc_stderr,none": 0.012738241271018443, + "alias": "winogrande" + }, + "harness|mmlu|0": { + "acc,none": 0.5237857855006409, + "acc_stderr,none": 0.003985973730899309, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.485653560042508, + "acc_stderr,none": 0.006840270638361877 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.24603174603174602, + "acc_stderr,none": 0.038522733649243156 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.0368105086916155 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7107843137254902, + "acc_stderr,none": 0.03182231867647553 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7468354430379747, + "acc_stderr,none": 0.028304657943035282 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.6859504132231405, + "acc_stderr,none": 0.042369647530410184 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6388888888888888, + "acc_stderr,none": 0.04643454608906275 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6932515337423313, + "acc_stderr,none": 0.036230899157241474 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5924855491329479, + "acc_stderr,none": 0.0264545781469315 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.24916201117318434, + "acc_stderr,none": 0.014465893829859926 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6109324758842444, + "acc_stderr,none": 0.027690337536485376 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6018518518518519, + "acc_stderr,none": 0.02723741509459247 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.39895697522816165, + "acc_stderr,none": 0.012506757655293674 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7719298245614035, + "acc_stderr,none": 0.03218093795602357 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6057289990344383, + "acc_stderr,none": 0.008413631260310068 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.52, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5622641509433962, + "acc_stderr,none": 0.03053333843046752 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.4277456647398844, + "acc_stderr,none": 0.03772446857518027 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6143497757847534, + "acc_stderr,none": 0.03266842214289202 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7281553398058253, + "acc_stderr,none": 0.044052680241409216 + }, + 
"harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7991452991452992, + "acc_stderr,none": 0.026246772946890477 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.62, + "acc_stderr,none": 0.04878317312145632 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7637292464878672, + "acc_stderr,none": 0.015190473717037495 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6111111111111112, + "acc_stderr,none": 0.02791405551046802 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.425531914893617, + "acc_stderr,none": 0.029494827600144366 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.4852941176470588, + "acc_stderr,none": 0.03035969707904611 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.46987951807228917, + "acc_stderr,none": 0.03885425420866767 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.607084822879428, + "acc_stderr,none": 0.008540524382266563 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2719298245614035, + "acc_stderr,none": 0.04185774424022056 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6616161616161617, + "acc_stderr,none": 0.03371124142626302 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7461139896373057, + "acc_stderr,none": 0.03141024780565319 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.47435897435897434, + "acc_stderr,none": 0.025317649726448663 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.5, + "acc_stderr,none": 0.032478490123081544 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7174311926605504, + "acc_stderr,none": 0.01930424349770715 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6412213740458015, + "acc_stderr,none": 0.04206739313864908 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5326797385620915, + "acc_stderr,none": 0.020184583359102202 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425464 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6285714285714286, + "acc_stderr,none": 0.030932858792789845 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7562189054726368, + "acc_stderr,none": 0.03036049015401466 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.82, + "acc_stderr,none": 0.03861229196653694 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4186489058039962, + "acc_stderr,none": 0.008528121437144745 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542129 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 0.04316378599511324 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + 
"acc,none": 0.5592105263157895, + "acc_stderr,none": 0.04040311062490436 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5277777777777778, + "acc_stderr,none": 0.04174752578923185 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.47, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.27450980392156865, + "acc_stderr,none": 0.04440521906179326 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.62, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.4085106382978723, + "acc_stderr,none": 0.03213418026701576 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5310344827586206, + "acc_stderr,none": 0.04158632762097828 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.31216931216931215, + "acc_stderr,none": 0.023865206836972585 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6258064516129033, + "acc_stderr,none": 0.027528904299845704 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.43842364532019706, + "acc_stderr,none": 0.03491207857486519 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2740740740740741, + "acc_stderr,none": 0.027195934804085626 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.31788079470198677, + "acc_stderr,none": 0.038020397601079024 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.3194444444444444, + "acc_stderr,none": 0.03179876342176851 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3392857142857143, + "acc_stderr,none": 0.04493949068613541 + } + }, + "task_info": { + "model": "TheBloke/Llama-2-13B-chat-AWQ", + "revision": "main", + "private": false, + "params": 7.25, + "architectures": "LlamaForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 12.79, + "model_size": 7.25, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-10T07:46:17Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "quant_method": "awq", + "zero_point": true, + "group_size": 128, + "bits": 4, + "version": "gemm" + }, + "versions": { + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|openbookqa|0": 1.0, + 
"harness|winogrande|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + 
"mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715337875.2237763, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/Llama-2-13B-chat-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-05-11-21-17-09.json b/TheBloke/results_2024-05-11-21-17-09.json new file mode 100644 index 0000000000000000000000000000000000000000..f839371e8e6e172a177d5e9706110e86b8758cf8 --- /dev/null +++ b/TheBloke/results_2024-05-11-21-17-09.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-11-21-17-09", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.16, + "model_params": 7.04, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5067319461444308, + "acc_stderr,none": 0.017501914492655382, + "alias": "truthfulqa_mc1" + }, + "harness|winogrande|0": { + "acc,none": 0.7324388318863457, + "acc_stderr,none": 0.012441718456893009, + "alias": "winogrande" + }, + "harness|hellaswag|0": { + "acc,none": 0.6537542322246565, + "acc_stderr,none": 0.004748003276466214, + "acc_norm,none": 0.8312089225253934, + "acc_norm_stderr,none": 0.003738017734037975, + "alias": "hellaswag" + }, + "harness|boolq|0": { + "acc,none": 0.8431192660550458, + "acc_stderr,none": 0.0063609481079962785, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.328, + "acc_stderr,none": 0.021017027165175495, + "acc_norm,none": 0.438, + "acc_norm_stderr,none": 0.022210326363977413, + "alias": "openbookqa" + }, + "harness|mmlu|0": { + "acc,none": 0.5804016521862982, + "acc_stderr,none": 0.003961191476321839, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5341126461211477, + "acc_stderr,none": 0.0068607425916126594 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 
0.3492063492063492, + "acc_stderr,none": 0.04263906892795133 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7151515151515152, + "acc_stderr,none": 0.03524390844511781 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7745098039215687, + "acc_stderr,none": 0.02933116229425172 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7805907172995781, + "acc_stderr,none": 0.026939106581553945 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7355371900826446, + "acc_stderr,none": 0.040261875275912046 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7129629629629629, + "acc_stderr,none": 0.043733130409147614 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7484662576687117, + "acc_stderr,none": 0.03408997886857529 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6358381502890174, + "acc_stderr,none": 0.025906632631016113 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.34301675977653634, + "acc_stderr,none": 0.015876912673057752 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6302250803858521, + "acc_stderr,none": 0.027417996705630995 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6697530864197531, + "acc_stderr,none": 0.026168298456732842 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4172099087353325, + "acc_stderr,none": 0.012593959992906424 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8187134502923976, + "acc_stderr,none": 0.029547741687640038 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6462825877051819, + "acc_stderr,none": 0.008254745930389685 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6830188679245283, + "acc_stderr,none": 0.02863723563980089 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5722543352601156, + "acc_stderr,none": 0.03772446857518027 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.33, + "acc_stderr,none": 0.04725815626252606 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6053811659192825, + "acc_stderr,none": 0.03280400504755291 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7475728155339806, + "acc_stderr,none": 0.04301250399690878 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8589743589743589, + "acc_stderr,none": 0.022801382534597524 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.62, + "acc_stderr,none": 0.04878317312145632 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7701149425287356, + "acc_stderr,none": 0.015046301846691807 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6274509803921569, + "acc_stderr,none": 0.027684181883302895 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - 
professional_accounting", + "acc,none": 0.44680851063829785, + "acc_stderr,none": 0.029658235097666907 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.625, + "acc_stderr,none": 0.029408372932278746 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.42771084337349397, + "acc_stderr,none": 0.038515976837185335 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.677608059798505, + "acc_stderr,none": 0.008221694733283947 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.39473684210526316, + "acc_stderr,none": 0.04598188057816542 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7424242424242424, + "acc_stderr,none": 0.031156269519646843 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7979274611398963, + "acc_stderr,none": 0.02897908979429673 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.558974358974359, + "acc_stderr,none": 0.02517404838400075 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6386554621848739, + "acc_stderr,none": 0.031204691225150016 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7743119266055046, + "acc_stderr,none": 0.017923087667803057 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7022900763358778, + "acc_stderr,none": 0.040103589424622034 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5915032679738562, + "acc_stderr,none": 0.019886221037501862 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6636363636363637, + "acc_stderr,none": 0.04525393596302505 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7061224489795919, + "acc_stderr,none": 0.029162738410249772 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8208955223880597, + "acc_stderr,none": 0.027113286753111837 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.82, + "acc_stderr,none": 0.038612291966536934 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.48969235648588644, + "acc_stderr,none": 0.008709716985915076 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.34, + "acc_stderr,none": 0.047609522856952365 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5777777777777777, + "acc_stderr,none": 0.04266763404099582 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6052631578947368, + "acc_stderr,none": 0.039777499346220734 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6388888888888888, + "acc_stderr,none": 0.04016660030451233 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.54, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + 
"acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.43137254901960786, + "acc_stderr,none": 0.04928099597287534 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.4978723404255319, + "acc_stderr,none": 0.03268572658667492 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5586206896551724, + "acc_stderr,none": 0.04137931034482758 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.3994708994708995, + "acc_stderr,none": 0.02522545028406788 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6580645161290323, + "acc_stderr,none": 0.026985289576552746 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.49261083743842365, + "acc_stderr,none": 0.03517603540361008 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.64, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34444444444444444, + "acc_stderr,none": 0.02897264888484427 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3708609271523179, + "acc_stderr,none": 0.03943966699183629 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4212962962962963, + "acc_stderr,none": 0.03367462138896078 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.45535714285714285, + "acc_stderr,none": 0.04726835553719099 + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6730758600725846, + "acc_stderr,none": 0.015085602828695083, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5477815699658704, + "acc_stderr,none": 0.014544519880633827, + "acc_norm,none": 0.560580204778157, + "acc_norm_stderr,none": 0.014503747823580129, + "alias": "arc_challenge" + }, + "harness|arc:easy|0": { + "acc,none": 0.8152356902356902, + "acc_stderr,none": 0.007963772171570785, + "acc_norm,none": 0.7609427609427609, + "acc_norm_stderr,none": 0.008751754723580422, + "alias": "arc_easy" + }, + "harness|piqa|0": { + "acc,none": 0.7970620239390642, + "acc_stderr,none": 0.009383679003767338, + "acc_norm,none": 0.8025027203482046, + "acc_norm_stderr,none": 0.009288578108523262, + "alias": "piqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.542202154991171, + "perplexity_stderr,none": 0.07532470166471553, + "acc,none": 0.7110421113914225, + "acc_stderr,none": 0.00631505317377688, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ", + "revision": "main", + "private": false, + "params": 4.16, + "architectures": "MistralForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 7.04, + "model_size": 4.16, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-10T05:47:33Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + 
"quantization_config": { + "bits": 4, + "group_size": 128, + "damp_percent": 0.1, + "desc_act": true, + "sym": true, + "true_sequential": true, + "model_name_or_path": null, + "model_file_base_name": "model", + "quant_method": "gptq" + }, + "versions": { + "harness|truthfulqa:mc1|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|piqa|0": 1.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 
0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715428953.698051, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/Mistral-7B-Instruct-v0.2-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/TheBloke/results_2024-05-12-19-26-44.json b/TheBloke/results_2024-05-12-19-26-44.json new file mode 100644 index 0000000000000000000000000000000000000000..45674c90df6993511d5a3a910c124458ded0a842 --- /dev/null +++ b/TheBloke/results_2024-05-12-19-26-44.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-12-19-26-44", + "total_evaluation_time_secondes": "", + "model_name": "TheBloke/Llama-2-13B-chat-GPTQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 7.26, + "model_params": 12.8, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|mmlu|0": { + "acc,none": 0.5139581256231306, + "acc_stderr,none": 0.004007227446679919, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.4767268862911796, + "acc_stderr,none": 0.006863000718421932 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2619047619047619, + "acc_stderr,none": 0.03932537680392871 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6303030303030303, + "acc_stderr,none": 0.03769430314512567 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", 
+ "acc,none": 0.7009803921568627, + "acc_stderr,none": 0.03213325717373616 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7172995780590717, + "acc_stderr,none": 0.029312814153955914 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.6776859504132231, + "acc_stderr,none": 0.04266416363352168 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6851851851851852, + "acc_stderr,none": 0.04489931073591312 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6809815950920245, + "acc_stderr,none": 0.03661997551073836 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5895953757225434, + "acc_stderr,none": 0.02648339204209818 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.24916201117318434, + "acc_stderr,none": 0.01446589382985993 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6077170418006431, + "acc_stderr,none": 0.027731258647011994 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5864197530864198, + "acc_stderr,none": 0.027402042040269955 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.38722294654498046, + "acc_stderr,none": 0.012441155326854933 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7368421052631579, + "acc_stderr,none": 0.03377310252209206 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5880270357257805, + "acc_stderr,none": 0.008506546676113863 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.49, + "acc_stderr,none": 0.05024183937956911 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5509433962264151, + "acc_stderr,none": 0.030612730713641095 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.43352601156069365, + "acc_stderr,none": 0.03778621079092055 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6143497757847534, + "acc_stderr,none": 0.03266842214289202 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6796116504854369, + "acc_stderr,none": 0.04620284082280041 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7649572649572649, + "acc_stderr,none": 0.02777883590493543 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001976 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7458492975734355, + "acc_stderr,none": 0.015569254692045778 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5718954248366013, + "acc_stderr,none": 0.028332397483664274 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.3900709219858156, + "acc_stderr,none": 0.02909767559946393 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.48161764705882354, + "acc_stderr,none": 0.030352303395351964 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + 
"acc,none": 0.46987951807228917, + "acc_stderr,none": 0.03885425420866767 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.5976600584985375, + "acc_stderr,none": 0.008593939236131217 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2719298245614035, + "acc_stderr,none": 0.04185774424022056 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6464646464646465, + "acc_stderr,none": 0.03406086723547153 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7305699481865285, + "acc_stderr,none": 0.03201867122877793 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.47435897435897434, + "acc_stderr,none": 0.02531764972644866 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.4831932773109244, + "acc_stderr,none": 0.03246013680375308 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7009174311926606, + "acc_stderr,none": 0.019630417285415182 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6106870229007634, + "acc_stderr,none": 0.042764865428145914 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5261437908496732, + "acc_stderr,none": 0.020200164564804588 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5636363636363636, + "acc_stderr,none": 0.04750185058907297 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.673469387755102, + "acc_stderr,none": 0.030021056238440307 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7412935323383084, + "acc_stderr,none": 0.030965903123573026 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.79, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4148430066603235, + "acc_stderr,none": 0.008544388493151839 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.48148148148148145, + "acc_stderr,none": 0.043163785995113245 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5657894736842105, + "acc_stderr,none": 0.04033565667848319 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5416666666666666, + "acc_stderr,none": 0.04166666666666666 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.44, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3137254901960784, + "acc_stderr,none": 0.04617034827006718 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.65, + 
"acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.39574468085106385, + "acc_stderr,none": 0.031967586978353627 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5103448275862069, + "acc_stderr,none": 0.04165774775728763 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.31746031746031744, + "acc_stderr,none": 0.02397386199899208 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.5903225806451613, + "acc_stderr,none": 0.027976054915347364 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.43842364532019706, + "acc_stderr,none": 0.03491207857486519 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.25555555555555554, + "acc_stderr,none": 0.026593939101844082 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.304635761589404, + "acc_stderr,none": 0.03757949922943342 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.33796296296296297, + "acc_stderr,none": 0.03225941352631295 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3482142857142857, + "acc_stderr,none": 0.04521829902833585 + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.26805385556915545, + "acc_stderr,none": 0.015506204722834547, + "alias": "truthfulqa_mc1" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4616040955631399, + "acc_stderr,none": 0.014568245550296358, + "acc_norm,none": 0.4863481228668942, + "acc_norm_stderr,none": 0.014605943429860947, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.41768405068814446, + "acc_stderr,none": 0.015583992889631661, + "alias": "truthfulqa_mc2" + }, + "harness|arc:easy|0": { + "acc,none": 0.7714646464646465, + "acc_stderr,none": 0.008615944722488472, + "acc_norm,none": 0.7386363636363636, + "acc_norm_stderr,none": 0.00901583836660819, + "alias": "arc_easy" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.01529524411286, + "perplexity_stderr,none": 0.07297248309564458, + "acc,none": 0.7273432951678633, + "acc_stderr,none": 0.0062042584889067335, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.8119266055045872, + "acc_stderr,none": 0.006834623690939715, + "alias": "boolq" + }, + "harness|winogrande|0": { + "acc,none": 0.6977111286503551, + "acc_stderr,none": 0.012907200361627541, + "alias": "winogrande" + }, + "harness|hellaswag|0": { + "acc,none": 0.5992830113523202, + "acc_stderr,none": 0.004890422457747264, + "acc_norm,none": 0.7880900219079865, + "acc_norm_stderr,none": 0.004078262107595482, + "alias": "hellaswag" + }, + "harness|piqa|0": { + "acc,none": 0.7780195865070729, + "acc_stderr,none": 0.009696120744661996, + "acc_norm,none": 0.7905331882480957, + "acc_norm_stderr,none": 0.009494302979819808, + "alias": "piqa" + }, + "harness|openbookqa|0": { + "acc,none": 0.342, + "acc_stderr,none": 0.02123614719989926, + "acc_norm,none": 0.426, + "acc_norm_stderr,none": 0.022136577335085637, + "alias": "openbookqa" + } + }, + "task_info": { + "model": 
"TheBloke/Llama-2-13B-chat-GPTQ", + "revision": "main", + "private": false, + "params": 7.26, + "architectures": "LlamaForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 12.8, + "model_size": 7.26, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-10T07:50:09Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "damp_percent": 0.01, + "desc_act": false, + "sym": true, + "true_sequential": true, + "model_name_or_path": null, + "model_file_base_name": "model", + "quant_method": "gptq" + }, + "versions": { + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|lambada:openai|0": 
1.0, + "harness|boolq|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|piqa|0": 1.0, + "harness|openbookqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715502446.1674964, + "config": { + "model": "hf", + "model_args": "pretrained=TheBloke/Llama-2-13B-chat-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/alokabhishek/results_2024-05-08-02-05-18.json b/alokabhishek/results_2024-05-08-02-05-18.json new file mode 100644 index 0000000000000000000000000000000000000000..114368088c546d5747b381716d93c747f40b1dd9 --- /dev/null +++ b/alokabhishek/results_2024-05-08-02-05-18.json @@ -0,0 +1,589 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-08-02-05-18", + "total_evaluation_time_secondes": "", + "model_name": "alokabhishek/falcon-7b-instruct-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.01, + "model_params": 6.83, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|mmlu|0": { + "acc,none": 0.2454778521578123, + "acc_stderr,none": 0.0036257236339441393, + "alias": "mmlu" + }, + 
"harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.24782146652497344, + "acc_stderr,none": 0.006287119040079848 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2857142857142857, + "acc_stderr,none": 0.04040610178208841 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.24848484848484848, + "acc_stderr,none": 0.033744026441394036 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.25980392156862747, + "acc_stderr,none": 0.030778554678693254 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.26582278481012656, + "acc_stderr,none": 0.028756799629658335 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.256198347107438, + "acc_stderr,none": 0.03984979653302871 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.28703703703703703, + "acc_stderr,none": 0.043733130409147614 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.19631901840490798, + "acc_stderr,none": 0.031207970394709218 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.2658959537572254, + "acc_stderr,none": 0.023786203255508287 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23910614525139665, + "acc_stderr,none": 0.014265554192331154 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.20257234726688103, + "acc_stderr,none": 0.022827317491059675 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.2222222222222222, + "acc_stderr,none": 0.023132376234543325 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.24445893089960888, + "acc_stderr,none": 0.010976425013113902 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.3684210526315789, + "acc_stderr,none": 0.036996580176568775 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.261345349211458, + "acc_stderr,none": 0.007864211382806421 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.2188679245283019, + "acc_stderr,none": 0.02544786382510861 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.23699421965317918, + "acc_stderr,none": 0.03242414757483099 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.33183856502242154, + "acc_stderr,none": 0.031602951437766785 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.2524271844660194, + "acc_stderr,none": 0.04301250399690877 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.29914529914529914, + "acc_stderr,none": 0.029996951858349483 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.2720306513409962, + 
"acc_stderr,none": 0.01591336744750051 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.23202614379084968, + "acc_stderr,none": 0.024170840879341016 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.2553191489361702, + "acc_stderr,none": 0.026011992930902 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.1875, + "acc_stderr,none": 0.023709788253811766 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.30120481927710846, + "acc_stderr,none": 0.0357160923005348 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.23951901202469938, + "acc_stderr,none": 0.00768774913583545 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.04142439719489359 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.25757575757575757, + "acc_stderr,none": 0.031156269519646836 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.20207253886010362, + "acc_stderr,none": 0.02897908979429673 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.24102564102564103, + "acc_stderr,none": 0.021685546665333184 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.24789915966386555, + "acc_stderr,none": 0.028047967224176892 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.20550458715596331, + "acc_stderr,none": 0.017324352325016015 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2900763358778626, + "acc_stderr,none": 0.03980066246467765 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.2549019607843137, + "acc_stderr,none": 0.017630827375148383 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.3181818181818182, + "acc_stderr,none": 0.04461272175910508 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.19591836734693877, + "acc_stderr,none": 0.025409301953225678 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.22388059701492538, + "acc_stderr,none": 0.02947525023601719 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.23215984776403426, + "acc_stderr,none": 0.007508345730856319 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.22, + "acc_stderr,none": 0.04163331998932269 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.2, + "acc_stderr,none": 0.03455473702325437 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.18421052631578946, + "acc_stderr,none": 0.0315469804508223 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2986111111111111, + "acc_stderr,none": 0.03827052357950756 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.22, + "acc_stderr,none": 0.04163331998932269 + }, + 
"harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.24, + "acc_stderr,none": 0.042923469599092816 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.22, + "acc_stderr,none": 0.041633319989322695 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.24509803921568626, + "acc_stderr,none": 0.04280105837364397 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.2936170212765957, + "acc_stderr,none": 0.029771642712491227 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.23448275862068965, + "acc_stderr,none": 0.035306258743465914 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.20899470899470898, + "acc_stderr,none": 0.020940481565334835 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.23548387096774193, + "acc_stderr,none": 0.024137632429337724 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.18226600985221675, + "acc_stderr,none": 0.02716334085964515 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.29, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.22592592592592592, + "acc_stderr,none": 0.025497532639609542 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.23841059602649006, + "acc_stderr,none": 0.03479185572599661 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.1712962962962963, + "acc_stderr,none": 0.02569534164382468 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.29464285714285715, + "acc_stderr,none": 0.043270409325787296 + }, + "harness|arc:easy|0": { + "acc,none": 0.7285353535353535, + "acc_stderr,none": 0.009125362970360623, + "acc_norm,none": 0.6763468013468014, + "acc_norm_stderr,none": 0.009600478182273785, + "alias": "arc_easy" + }, + "harness|hellaswag|0": { + "acc,none": 0.5119498107946624, + "acc_stderr,none": 0.004988356146499022, + "acc_norm,none": 0.6914957179844653, + "acc_norm_stderr,none": 0.004609320024893916, + "alias": "hellaswag" + }, + "harness|piqa|0": { + "acc,none": 0.7823721436343852, + "acc_stderr,none": 0.009627407474840874, + "acc_norm,none": 0.7861806311207835, + "acc_norm_stderr,none": 0.009565994206915592, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.7125382262996942, + "acc_stderr,none": 0.007915651663295326, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 5.298423800158154, + "perplexity_stderr,none": 0.12158416023239337, + "acc,none": 0.634775858723074, + "acc_stderr,none": 0.006708138364946144, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.3, + "acc_stderr,none": 0.020514426225628043, + "acc_norm,none": 0.408, + "acc_norm_stderr,none": 0.02200091089387719, + "alias": "openbookqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.39761092150170646, + "acc_stderr,none": 0.014301752223279528, + "acc_norm,none": 
0.4189419795221843, + "acc_norm_stderr,none": 0.01441810695363901, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.44307675898066345, + "acc_stderr,none": 0.014804938982947631, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2876376988984088, + "acc_stderr,none": 0.015846315101394795, + "alias": "truthfulqa_mc1" + }, + "harness|winogrande|0": { + "acc,none": 0.6558800315706393, + "acc_stderr,none": 0.013352121905005935, + "alias": "winogrande" + } + }, + "task_info": { + "model": "alokabhishek/falcon-7b-instruct-bnb-4bit", + "revision": "main", + "private": false, + "params": 14.852, + "architectures": "FalconForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 29.704, + "model_size": 14.852, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-07T08:53:20Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "_load_in_4bit": true, + "_load_in_8bit": false, + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "versions": { + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + 
"harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|winogrande|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715072170.8339446, + "config": { + "model": "hf", + "model_args": "pretrained=alokabhishek/falcon-7b-instruct-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/astronomer/results_2024-05-13-17-16-12.json b/astronomer/results_2024-05-13-17-16-12.json new file mode 100644 index 
0000000000000000000000000000000000000000..dbe2e3844bddc1dc7d9c8765e265305dc47b2b50 --- /dev/null +++ b/astronomer/results_2024-05-13-17-16-12.json @@ -0,0 +1,588 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-13-17-16-12", + "total_evaluation_time_secondes": "", + "model_name": "astronomer/Llama-3-8B-Instruct-GPTQ-4-Bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.74, + "model_params": 7.04, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.7134964483030781, + "acc_stderr,none": 0.01270703013996038, + "alias": "winogrande" + }, + "harness|piqa|0": { + "acc,none": 0.7698585418933623, + "acc_stderr,none": 0.009820832826839817, + "acc_norm,none": 0.7698585418933623, + "acc_norm_stderr,none": 0.009820832826839815, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.8201834862385321, + "acc_stderr,none": 0.006716806494844575, + "alias": "boolq" + }, + "harness|hellaswag|0": { + "acc,none": 0.5652260505875324, + "acc_stderr,none": 0.004947141797384123, + "acc_norm,none": 0.7495518820952002, + "acc_norm_stderr,none": 0.004323856300539162, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.8034511784511784, + "acc_stderr,none": 0.008154233832067922, + "acc_norm,none": 0.7828282828282829, + "acc_norm_stderr,none": 0.008460637338999105, + "alias": "arc_easy" + }, + "harness|openbookqa|0": { + "acc,none": 0.314, + "acc_stderr,none": 0.020776701920308997, + "acc_norm,none": 0.426, + "acc_norm_stderr,none": 0.022136577335085637, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5058067674960235, + "acc_stderr,none": 0.01513541322967859, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.34149326805385555, + "acc_stderr,none": 0.016600688619950826, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 0.6135165930779092, + "acc_stderr,none": 0.00389230425794973, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5621679064824655, + "acc_stderr,none": 0.006801371377907809 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.42063492063492064, + "acc_stderr,none": 0.04415438226743744 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7393939393939394, + "acc_stderr,none": 0.034277431758165236 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8137254901960784, + "acc_stderr,none": 0.027325470966716326 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8143459915611815, + "acc_stderr,none": 0.025310495376944867 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7520661157024794, + "acc_stderr,none": 0.03941897526516302 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7962962962962963, + "acc_stderr,none": 0.03893542518824849 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7239263803680982, + "acc_stderr,none": 0.03512385283705048 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6763005780346821, + "acc_stderr,none": 
0.02519018132760842 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3329608938547486, + "acc_stderr,none": 0.01576171617839756 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.707395498392283, + "acc_stderr,none": 0.02583989833487798 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7037037037037037, + "acc_stderr,none": 0.025407197798890165 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.46088657105606257, + "acc_stderr,none": 0.012731102790504524 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7543859649122807, + "acc_stderr,none": 0.03301405946987251 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6935951078210493, + "acc_stderr,none": 0.008023000727075687 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.690566037735849, + "acc_stderr,none": 0.028450154794118634 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6416184971098265, + "acc_stderr,none": 0.03656343653353159 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6233183856502242, + "acc_stderr,none": 0.032521134899291884 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8349514563106796, + "acc_stderr,none": 0.036756688322331886 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8846153846153846, + "acc_stderr,none": 0.02093019318517933 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.75, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.789272030651341, + "acc_stderr,none": 0.014583812465862546 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7091503267973857, + "acc_stderr,none": 0.02600480036395213 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5354609929078015, + "acc_stderr,none": 0.02975238965742705 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6691176470588235, + "acc_stderr,none": 0.02858270975389844 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5060240963855421, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7114072148196295, + "acc_stderr,none": 0.00803029096887605 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.45614035087719296, + "acc_stderr,none": 0.04685473041907789 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7474747474747475, + "acc_stderr,none": 0.030954055470365907 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8186528497409327, + "acc_stderr,none": 0.027807032360686088 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 
0.6230769230769231, + "acc_stderr,none": 0.024570975364225995 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6680672268907563, + "acc_stderr,none": 0.03058869701378364 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8055045871559633, + "acc_stderr,none": 0.016970289090458057 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7480916030534351, + "acc_stderr,none": 0.03807387116306086 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6535947712418301, + "acc_stderr,none": 0.01924978569171721 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6818181818181818, + "acc_stderr,none": 0.04461272175910509 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7061224489795919, + "acc_stderr,none": 0.029162738410249762 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8009950248756219, + "acc_stderr,none": 0.028231365092758406 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.0377525168068637 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5156993339676499, + "acc_stderr,none": 0.00857673801503054 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6074074074074074, + "acc_stderr,none": 0.04218506215368879 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6578947368421053, + "acc_stderr,none": 0.03860731599316092 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7361111111111112, + "acc_stderr,none": 0.03685651095897532 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.39, + "acc_stderr,none": 0.04902071300001975 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.47, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.26, + "acc_stderr,none": 0.04408440022768079 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4803921568627451, + "acc_stderr,none": 0.04971358884367405 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.72, + "acc_stderr,none": 0.045126085985421296 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5531914893617021, + "acc_stderr,none": 0.032500536843658404 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6137931034482759, + "acc_stderr,none": 0.04057324734419036 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4074074074074074, + "acc_stderr,none": 0.025305906241590632 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7419354838709677, + "acc_stderr,none": 0.024892469172462843 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.47783251231527096, + "acc_stderr,none": 0.03514528562175008 + }, + 
"harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.64, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3592592592592593, + "acc_stderr,none": 0.029252905927251976 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.423841059602649, + "acc_stderr,none": 0.04034846678603397 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4583333333333333, + "acc_stderr,none": 0.033981108902946366 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.42857142857142855, + "acc_stderr,none": 0.04697113923010212 + }, + "harness|arc:challenge|0": { + "acc,none": 0.5068259385665529, + "acc_stderr,none": 0.014610029151379813, + "acc_norm,none": 0.5392491467576792, + "acc_norm_stderr,none": 0.014566303676636588, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.5080767853820873, + "perplexity_stderr,none": 0.09030328878014143, + "acc,none": 0.6993984086939646, + "acc_stderr,none": 0.006388075353174957, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "astronomer/Llama-3-8B-Instruct-GPTQ-4-Bit", + "revision": "main", + "private": false, + "params": 5.74, + "architectures": "LlamaForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 7.04, + "model_size": 5.74, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-10T04:42:46Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "damp_percent": 0.1, + "desc_act": true, + "group_size": 128, + "is_marlin_format": false, + "model_file_base_name": null, + "model_name_or_path": null, + "quant_method": "gptq", + "static_groups": false, + "sym": true, + "true_sequential": true + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + 
"harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + 
"piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715587328.967321, + "config": { + "model": "hf", + "model_args": "pretrained=astronomer/Llama-3-8B-Instruct-GPTQ-4-Bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/baichuan-inc/results_2024-05-13-19-42-01.json b/baichuan-inc/results_2024-05-13-19-42-01.json new file mode 100644 index 0000000000000000000000000000000000000000..3d60722522a281868f8d520c68e725af34a905ab --- /dev/null +++ b/baichuan-inc/results_2024-05-13-19-42-01.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-13-19-42-01", + "total_evaluation_time_secondes": "", + "model_name": "baichuan-inc/Baichuan2-7B-Chat-4bits", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.4, + "model_params": 7.0, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|arc:easy|0": { + "acc,none": 0.726010101010101, + "acc_stderr,none": 0.009151805901544022, + "acc_norm,none": 0.678030303030303, + "acc_norm_stderr,none": 0.009587386696300385, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3023255813953488, + "acc_stderr,none": 0.016077509266133026, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.038756542051814, + "perplexity_stderr,none": 0.11540847819628168, + "acc,none": 0.672811954201436, + "acc_stderr,none": 0.006536686193974627, + "alias": "lambada_openai" + }, + "harness|winogrande|0": { + "acc,none": 0.67008681925809, + "acc_stderr,none": 0.013214432542517553, + "alias": "winogrande" + }, + "harness|piqa|0": { + "acc,none": 0.735038084874864, + "acc_stderr,none": 0.010296557993316075, + "acc_norm,none": 0.7383025027203483, + "acc_norm_stderr,none": 0.010255630772708229, + "alias": "piqa" + }, + "harness|mmlu|0": { + "acc,none": 0.5022076627261074, + "acc_stderr,none": 0.004038480145464961, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.46099893730074387, + "acc_stderr,none": 0.006891577589106509 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.373015873015873, + "acc_stderr,none": 0.04325506042017086 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6484848484848484, + "acc_stderr,none": 0.037282069986826503 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7009803921568627, + "acc_stderr,none": 0.03213325717373617 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.70042194092827, + "acc_stderr,none": 0.029818024749753095 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.628099173553719, + "acc_stderr,none": 0.044120158066245044 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6111111111111112, + "acc_stderr,none": 0.0471282125742677 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.5705521472392638, + "acc_stderr,none": 0.03889066619112722 + }, 
+ "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5317919075144508, + "acc_stderr,none": 0.02686462436675666 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23575418994413408, + "acc_stderr,none": 0.014196375686290804 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5819935691318328, + "acc_stderr,none": 0.028013651891995076 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5771604938271605, + "acc_stderr,none": 0.027487472980871595 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.37809647979139505, + "acc_stderr,none": 0.012384878406798095 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7485380116959064, + "acc_stderr,none": 0.033275044238468436 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5806243965239781, + "acc_stderr,none": 0.008620571172520262 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.57, + "acc_stderr,none": 0.04975698519562428 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5547169811320755, + "acc_stderr,none": 0.030588052974270655 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.4913294797687861, + "acc_stderr,none": 0.038118909889404105 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.04793724854411022 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5695067264573991, + "acc_stderr,none": 0.0332319730294294 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6213592233009708, + "acc_stderr,none": 0.048026946982589726 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7564102564102564, + "acc_stderr,none": 0.028120966503914394 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.54, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7037037037037037, + "acc_stderr,none": 0.016328814422102052 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5849673202614379, + "acc_stderr,none": 0.028213504177824093 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.35815602836879434, + "acc_stderr,none": 0.02860208586275942 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.5330882352941176, + "acc_stderr,none": 0.030306257722468317 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.5794605134871629, + "acc_stderr,none": 0.008692685039668366 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2807017543859649, + "acc_stderr,none": 0.04227054451232199 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6464646464646465, + "acc_stderr,none": 0.03406086723547153 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7202072538860104, + "acc_stderr,none": 
0.032396370467357036 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.46923076923076923, + "acc_stderr,none": 0.025302958890850154 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.48739495798319327, + "acc_stderr,none": 0.03246816765752174 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.6844036697247706, + "acc_stderr,none": 0.019926117513869662 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6183206106870229, + "acc_stderr,none": 0.04260735157644561 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.4950980392156863, + "acc_stderr,none": 0.020226862710039463 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425465 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6040816326530613, + "acc_stderr,none": 0.03130802899065685 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7164179104477612, + "acc_stderr,none": 0.031871875379197966 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252609 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4110371075166508, + "acc_stderr,none": 0.008578848650999856 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.29, + "acc_stderr,none": 0.04560480215720683 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.4962962962962963, + "acc_stderr,none": 0.04319223625811331 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5328947368421053, + "acc_stderr,none": 0.040601270352363966 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.4861111111111111, + "acc_stderr,none": 0.041795966175810016 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145632 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.46, + "acc_stderr,none": 0.05009082659620333 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.2549019607843137, + "acc_stderr,none": 0.04336432707993176 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.63, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.41702127659574467, + "acc_stderr,none": 0.03223276266711712 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.45517241379310347, + "acc_stderr,none": 0.04149886942192118 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.29894179894179895, + "acc_stderr,none": 0.023577604791655795 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.5806451612903226, + "acc_stderr,none": 0.02807158890109184 + }, + 
"harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.39408866995073893, + "acc_stderr,none": 0.03438157967036543 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.25925925925925924, + "acc_stderr,none": 0.02671924078371218 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3576158940397351, + "acc_stderr,none": 0.03913453431177258 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.44907407407407407, + "acc_stderr,none": 0.03392238405321617 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3482142857142857, + "acc_stderr,none": 0.04521829902833585 + }, + "harness|boolq|0": { + "acc,none": 0.7880733944954128, + "acc_stderr,none": 0.007147737811541546, + "alias": "boolq" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4629458144430423, + "acc_stderr,none": 0.015589156879983877, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.39078498293515357, + "acc_stderr,none": 0.014258563880513778, + "acc_norm,none": 0.4206484641638225, + "acc_norm_stderr,none": 0.014426211252508403, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.288, + "acc_stderr,none": 0.02027150383507522, + "acc_norm,none": 0.388, + "acc_norm_stderr,none": 0.021814300984787635, + "alias": "openbookqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5295757817167894, + "acc_stderr,none": 0.004981044370530789, + "acc_norm,none": 0.7046405098585939, + "acc_norm_stderr,none": 0.004552718360513105, + "alias": "hellaswag" + } + }, + "task_info": { + "model": "./Baichuan2-7B-Chat-4bits", + "revision": "main", + "private": false, + "params": 7.0, + "architectures": "BaichuanForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 7.0, + "model_size": 5.4, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-29T07:30:28Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false + }, + "versions": { + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|piqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + 
"harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|hellaswag|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + 
"mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715594337.0446994, + "config": { + "model": "hf", + "model_args": "pretrained=./Baichuan2-7B-Chat-4bits,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/baichuan-inc/results_2024-05-14-00-28-25.json b/baichuan-inc/results_2024-05-14-00-28-25.json new file mode 100644 index 0000000000000000000000000000000000000000..4704aeb8c343a8cdb7505d101d3df917eca7b444 --- /dev/null +++ b/baichuan-inc/results_2024-05-14-00-28-25.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-14-00-28-25", + "total_evaluation_time_secondes": "", + "model_name": "baichuan-inc/Baichuan2-13B-Chat-4bits", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 9.08, + "model_params": 13.0, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.7568008705114254, + "acc_stderr,none": 0.010009611953858943, + "acc_norm,none": 0.764961915125136, + "acc_norm_stderr,none": 0.00989314668880533, + "alias": "piqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.7470538720538721, + "acc_stderr,none": 0.008919862739165623, + "acc_norm,none": 0.6948653198653199, + "acc_norm_stderr,none": 0.009448531094163909, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4684300341296928, + "acc_stderr,none": 0.014582236460866977, + "acc_norm,none": 0.47440273037542663, + "acc_norm_stderr,none": 0.014592230885298964, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.32, + "acc_stderr,none": 0.020882340488761805, + "acc_norm,none": 0.428, + "acc_norm_stderr,none": 0.022149790663861923, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4993847866633565, + "acc_stderr,none": 0.01580566629514981, + "alias": "truthfulqa_mc2" + }, + "harness|winogrande|0": { + "acc,none": 0.7063930544593529, + "acc_stderr,none": 0.012799397296204173, + "alias": "winogrande" + }, + "harness|hellaswag|0": { + "acc,none": 0.5663214499103765, + "acc_stderr,none": 0.004945691164810072, + "acc_norm,none": 0.7527384983071101, + "acc_norm_stderr,none": 0.004305383398710274, + "alias": "hellaswag" + }, + "harness|boolq|0": { + "acc,none": 0.8342507645259939, + "acc_stderr,none": 0.006503791548089842, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.2218982936112495, + "perplexity_stderr,none": 0.0814591097593229, + "acc,none": 0.7106539879681739, + "acc_stderr,none": 0.00631756795443543, + "alias": "lambada_openai" + }, 
+ "harness|mmlu|0": { + "acc,none": 0.5554052129326307, + "acc_stderr,none": 0.0039702777358831525, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5090329436769394, + "acc_stderr,none": 0.006845596956692873 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.35714285714285715, + "acc_stderr,none": 0.04285714285714281 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7272727272727273, + "acc_stderr,none": 0.03477691162163659 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7598039215686274, + "acc_stderr,none": 0.02998373305591361 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.759493670886076, + "acc_stderr,none": 0.027820781981149678 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.6942148760330579, + "acc_stderr,none": 0.04205953933884124 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6759259259259259, + "acc_stderr,none": 0.04524596007030048 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6993865030674846, + "acc_stderr,none": 0.0360251131880677 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6040462427745664, + "acc_stderr,none": 0.02632981334194624 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2558659217877095, + "acc_stderr,none": 0.014593620923210737 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5819935691318328, + "acc_stderr,none": 0.028013651891995072 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6419753086419753, + "acc_stderr,none": 0.026675611926037082 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4322033898305085, + "acc_stderr,none": 0.012652297777114968 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.783625730994152, + "acc_stderr,none": 0.031581495393387324 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6337302864499518, + "acc_stderr,none": 0.00836658956166198 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6188679245283019, + "acc_stderr,none": 0.02989060968628664 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5722543352601156, + "acc_stderr,none": 0.037724468575180276 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6143497757847534, + "acc_stderr,none": 0.03266842214289201 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7572815533980582, + "acc_stderr,none": 0.04245022486384495 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8290598290598291, + "acc_stderr,none": 0.024662496845209818 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.62, + "acc_stderr,none": 0.04878317312145632 + }, + 
"harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7650063856960408, + "acc_stderr,none": 0.015162024152278443 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5947712418300654, + "acc_stderr,none": 0.02811092849280907 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.46099290780141844, + "acc_stderr,none": 0.02973659252642444 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.5698529411764706, + "acc_stderr,none": 0.030074971917302875 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4578313253012048, + "acc_stderr,none": 0.0387862677100236 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6421839454013649, + "acc_stderr,none": 0.008410443495026124 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.39473684210526316, + "acc_stderr,none": 0.045981880578165414 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7121212121212122, + "acc_stderr,none": 0.03225883512300992 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7772020725388601, + "acc_stderr,none": 0.03003114797764154 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5358974358974359, + "acc_stderr,none": 0.025285585990017848 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.5, + "acc_stderr,none": 0.032478490123081544 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7651376146788991, + "acc_stderr,none": 0.018175110510343578 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6717557251908397, + "acc_stderr,none": 0.04118438565806298 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5604575163398693, + "acc_stderr,none": 0.02007942040808792 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6, + "acc_stderr,none": 0.0469237132203465 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6612244897959184, + "acc_stderr,none": 0.030299506562154188 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7562189054726368, + "acc_stderr,none": 0.03036049015401465 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.84, + "acc_stderr,none": 0.03684529491774708 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4627339042182049, + "acc_stderr,none": 0.008541429032898602 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621505 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5407407407407407, + "acc_stderr,none": 0.04304979692464243 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.625, + "acc_stderr,none": 0.039397364351956274 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.03942082639927214 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + 
"acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.45, + "acc_stderr,none": 0.05 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3627450980392157, + "acc_stderr,none": 0.04784060704105654 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.72, + "acc_stderr,none": 0.045126085985421296 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.502127659574468, + "acc_stderr,none": 0.03268572658667492 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.503448275862069, + "acc_stderr,none": 0.0416656757710158 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.373015873015873, + "acc_stderr,none": 0.02490699045899257 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7096774193548387, + "acc_stderr,none": 0.02582210611941589 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.43842364532019706, + "acc_stderr,none": 0.03491207857486519 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.56, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.28888888888888886, + "acc_stderr,none": 0.027634907264178544 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.304635761589404, + "acc_stderr,none": 0.03757949922943342 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.37037037037037035, + "acc_stderr,none": 0.03293377139415191 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.30357142857142855, + "acc_stderr,none": 0.04364226155841044 + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.35006119951040393, + "acc_stderr,none": 0.01669794942015103, + "alias": "truthfulqa_mc1" + } + }, + "task_info": { + "model": "./Baichuan2-13B-Chat-4bits", + "revision": "main", + "private": false, + "params": 13.0, + "architectures": "BaichuanForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 13.0, + "model_size": 9.08, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-29T07:32:08Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|boolq|0": 2.0, + 
"harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc1|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + 
"mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715600850.9519372, + "config": { + "model": "hf", + "model_args": "pretrained=./Baichuan2-13B-Chat-4bits,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/casperhansen/results_2024-05-08-20-07-49.json b/casperhansen/results_2024-05-08-20-07-49.json new file mode 100644 index 0000000000000000000000000000000000000000..e8e8a38b651ac494538f445660cf54d825dcf84a --- /dev/null +++ b/casperhansen/results_2024-05-08-20-07-49.json @@ -0,0 +1,582 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-08-20-07-49", + "total_evaluation_time_secondes": "", + "model_name": "casperhansen/falcon-7b-awq", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.16, + "model_params": 8.33, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|boolq|0": { + "acc,none": 0.726605504587156, + "acc_stderr,none": 0.0077953705600891975, + "alias": "boolq" + }, + "harness|winogrande|0": { + "acc,none": 0.6890292028413575, + "acc_stderr,none": 0.013009534736286068, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.21909424724602203, + "acc_stderr,none": 0.014480038578757447, + "alias": "truthfulqa_mc1" + }, + "harness|arc:easy|0": { + "acc,none": 0.7428451178451179, + "acc_stderr,none": 0.008968394768971991, + "acc_norm,none": 0.7003367003367004, + "acc_norm_stderr,none": 0.009400228586205973, + "alias": "arc_easy" + }, + "harness|mmlu|0": { + "acc,none": 0.2680529839054266, + "acc_stderr,none": 0.0037312184112828226, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.27396386822529223, + "acc_stderr,none": 0.006496081637998288 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.25396825396825395, + "acc_stderr,none": 0.03893259610604673 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.2606060606060606, + "acc_stderr,none": 
0.03427743175816524 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.22058823529411764, + "acc_stderr,none": 0.02910225438967409 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.2616033755274262, + "acc_stderr,none": 0.028609516716994934 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.33884297520661155, + "acc_stderr,none": 0.043207678075366705 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.35185185185185186, + "acc_stderr,none": 0.04616631111801715 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.25153374233128833, + "acc_stderr,none": 0.03408997886857529 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.3236994219653179, + "acc_stderr,none": 0.025190181327608422 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574885 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.2861736334405145, + "acc_stderr,none": 0.025670259242188936 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.2993827160493827, + "acc_stderr,none": 0.02548311560119546 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.2803129074315515, + "acc_stderr,none": 0.011471555944958616 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.26900584795321636, + "acc_stderr,none": 0.03401052620104089 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.2648857418731896, + "acc_stderr,none": 0.007902788160794474 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.23773584905660378, + "acc_stderr,none": 0.02619980880756192 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.31213872832369943, + "acc_stderr,none": 0.03533133389323657 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542129 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.29596412556053814, + "acc_stderr,none": 0.03063659134869982 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.27184466019417475, + "acc_stderr,none": 0.044052680241409216 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.2606837606837607, + "acc_stderr,none": 0.028760348956523414 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.2886334610472541, + "acc_stderr,none": 0.01620379270319779 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.28431372549019607, + "acc_stderr,none": 0.02582916327275748 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.22695035460992907, + "acc_stderr,none": 0.024987106365642973 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 
0.16544117647058823, + "acc_stderr,none": 0.022571771025494757 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.2891566265060241, + "acc_stderr,none": 0.03529486801511115 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.26616834579135523, + "acc_stderr,none": 0.007948025982070027 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.04434600701584925 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.2878787878787879, + "acc_stderr,none": 0.03225883512300993 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.23316062176165803, + "acc_stderr,none": 0.030516111371476008 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.19743589743589743, + "acc_stderr,none": 0.02018264696867483 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.23529411764705882, + "acc_stderr,none": 0.027553614467863797 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.25504587155963304, + "acc_stderr,none": 0.01868850085653585 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.3435114503816794, + "acc_stderr,none": 0.041649760719448786 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.26143790849673204, + "acc_stderr,none": 0.017776947157528054 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.34545454545454546, + "acc_stderr,none": 0.04554619617541054 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.2979591836734694, + "acc_stderr,none": 0.029279567411065674 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.31840796019900497, + "acc_stderr,none": 0.03294118479054096 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.26419283222327944, + "acc_stderr,none": 0.007832889101339188 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.23, + "acc_stderr,none": 0.04229525846816506 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.28888888888888886, + "acc_stderr,none": 0.0391545063041425 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.29605263157894735, + "acc_stderr,none": 0.03715062154998904 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.20833333333333334, + "acc_stderr,none": 0.033961162058453336 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.18, + "acc_stderr,none": 0.03861229196653694 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.19, + "acc_stderr,none": 0.039427724440366234 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.30392156862745096, + "acc_stderr,none": 
0.04576665403207762 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.35319148936170214, + "acc_stderr,none": 0.031245325202761926 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.27586206896551724, + "acc_stderr,none": 0.03724563619774632 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.22486772486772486, + "acc_stderr,none": 0.02150209607822914 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.24838709677419354, + "acc_stderr,none": 0.024580028921481003 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.32019704433497537, + "acc_stderr,none": 0.032826493853041504 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.22962962962962963, + "acc_stderr,none": 0.025644108639267624 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.24503311258278146, + "acc_stderr,none": 0.03511807571804724 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.25, + "acc_stderr,none": 0.029531221160930918 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.33035714285714285, + "acc_stderr,none": 0.04464285714285714 + }, + "harness|hellaswag|0": { + "acc,none": 0.5702051384186417, + "acc_stderr,none": 0.004940349676769315, + "acc_norm,none": 0.7559251145190201, + "acc_norm_stderr,none": 0.004286594977390944, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.308, + "acc_stderr,none": 0.0206670329874661, + "acc_norm,none": 0.438, + "acc_norm_stderr,none": 0.022210326363977417, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.34705041069354287, + "acc_stderr,none": 0.013414222993514864, + "alias": "truthfulqa_mc2" + }, + "harness|piqa|0": { + "acc,none": 0.7959738846572362, + "acc_stderr,none": 0.009402378102942638, + "acc_norm,none": 0.8008705114254625, + "acc_norm_stderr,none": 0.009317391893706877, + "alias": "piqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.3924914675767918, + "acc_stderr,none": 0.014269634635670691, + "acc_norm,none": 0.42662116040955633, + "acc_norm_stderr,none": 0.014453185592920293, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.247753848970227, + "perplexity_stderr,none": 0.06320226910987307, + "acc,none": 0.7490782068697845, + "acc_stderr,none": 0.006040109961800763, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "casperhansen/falcon-7b-awq", + "revision": "main", + "private": false, + "params": 28.0, + "architectures": "RWForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 56.0, + "model_size": 28.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-08T00:55:39Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { 
+ "bits": 4, + "group_size": 64, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|boolq|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + 
"mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715129862.4682531, + "config": { + "model": "hf", + "model_args": "pretrained=casperhansen/falcon-7b-awq,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/crusoeai/results_2024-05-15-19-17-05.json b/crusoeai/results_2024-05-15-19-17-05.json new file mode 100644 index 0000000000000000000000000000000000000000..6625f148d01be498d9b8d2bda71b1dad13b45fed --- /dev/null +++ b/crusoeai/results_2024-05-15-19-17-05.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-15-19-17-05", + "total_evaluation_time_secondes": "", + "model_name": "crusoeai/Llama-3-8B-Instruct-Gradient-1048k-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.653375488, + "model_params": 8.030261248, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|openbookqa|0": { + "acc,none": 0.318, + "acc_stderr,none": 0.02084757162081401, + "acc_norm,none": 0.428, + "acc_norm_stderr,none": 0.022149790663861926, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.7916213275299239, + "acc_stderr,none": 0.009476125383049447, + "acc_norm,none": 0.8052230685527747, + "acc_norm_stderr,none": 0.009240006693317726, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.8321100917431192, + "acc_stderr,none": 0.006537252053566847, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.6067511750462897, + "acc_stderr,none": 0.0038817899756211985, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5404888416578109, + "acc_stderr,none": 
0.006766433869013026 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.40476190476190477, + "acc_stderr,none": 0.043902592653775614 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6848484848484848, + "acc_stderr,none": 0.0362773057502241 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8186274509803921, + "acc_stderr,none": 0.027044621719474058 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8016877637130801, + "acc_stderr,none": 0.025955020841621112 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7520661157024794, + "acc_stderr,none": 0.039418975265163025 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.04330043749650743 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7177914110429447, + "acc_stderr,none": 0.03536117886664742 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6763005780346821, + "acc_stderr,none": 0.025190181327608422 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3005586592178771, + "acc_stderr,none": 0.01533456680625116 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6945337620578779, + "acc_stderr,none": 0.026160584450140457 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7037037037037037, + "acc_stderr,none": 0.025407197798890165 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.42503259452411996, + "acc_stderr,none": 0.012625879884892 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8011695906432749, + "acc_stderr,none": 0.03061111655743253 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6964917927261024, + "acc_stderr,none": 0.007974873954487814 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7320754716981132, + "acc_stderr,none": 0.027257260322494845 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6069364161849711, + "acc_stderr,none": 0.0372424959581773 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237101 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.7040358744394619, + "acc_stderr,none": 0.030636591348699796 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8543689320388349, + "acc_stderr,none": 0.0349260647662379 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8717948717948718, + "acc_stderr,none": 0.02190190511507333 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7994891443167306, + "acc_stderr,none": 0.014317653708594206 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6928104575163399, + 
"acc_stderr,none": 0.026415601914389002 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5141843971631206, + "acc_stderr,none": 0.02981549448368206 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6654411764705882, + "acc_stderr,none": 0.02866199620233531 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5, + "acc_stderr,none": 0.03892494720807614 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.709782255443614, + "acc_stderr,none": 0.007993439066743081 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.43859649122807015, + "acc_stderr,none": 0.04668000738510455 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7525252525252525, + "acc_stderr,none": 0.03074630074212449 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.844559585492228, + "acc_stderr,none": 0.026148483469153303 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5974358974358974, + "acc_stderr,none": 0.02486499515976775 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6722689075630253, + "acc_stderr,none": 0.03048991141767323 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8201834862385321, + "acc_stderr,none": 0.016465345467391538 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7251908396946565, + "acc_stderr,none": 0.03915345408847834 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6470588235294118, + "acc_stderr,none": 0.019333142020797167 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6181818181818182, + "acc_stderr,none": 0.046534298079135075 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7183673469387755, + "acc_stderr,none": 0.028795185574291282 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8159203980099502, + "acc_stderr,none": 0.02740385941078684 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.0377525168068637 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5166508087535681, + "acc_stderr,none": 0.008620024107570354 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6518518518518519, + "acc_stderr,none": 0.041153246103369526 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.631578947368421, + "acc_stderr,none": 0.03925523381052932 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7013888888888888, + "acc_stderr,none": 0.03827052357950756 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + 
}, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4117647058823529, + "acc_stderr,none": 0.04897104952726367 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.76, + "acc_stderr,none": 0.04292346959909281 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5659574468085107, + "acc_stderr,none": 0.03240038086792747 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5517241379310345, + "acc_stderr,none": 0.04144311810878151 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.40476190476190477, + "acc_stderr,none": 0.025279850397404904 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7258064516129032, + "acc_stderr,none": 0.025378139970885196 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5123152709359606, + "acc_stderr,none": 0.035169204442208966 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.37777777777777777, + "acc_stderr,none": 0.029560707392465715 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.4370860927152318, + "acc_stderr,none": 0.04050035722230636 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4074074074074074, + "acc_stderr,none": 0.03350991604696043 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5089285714285714, + "acc_stderr,none": 0.04745033255489123 + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.30966952264381886, + "acc_stderr,none": 0.01618574435514492, + "alias": "truthfulqa_mc1" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5119453924914675, + "acc_stderr,none": 0.014607220340597171, + "acc_norm,none": 0.5469283276450512, + "acc_norm_stderr,none": 0.014546892052005631, + "alias": "arc_challenge" + }, + "harness|hellaswag|0": { + "acc,none": 0.5894244174467238, + "acc_stderr,none": 0.004909328992915085, + "acc_norm,none": 0.768074088826927, + "acc_norm_stderr,none": 0.004211993665515958, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.46346951013407334, + "acc_stderr,none": 0.01485187660034803, + "alias": "truthfulqa_mc2" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.7540333607659977, + "perplexity_stderr,none": 0.08030001592231274, + "acc,none": 0.44071414709877743, + "acc_stderr,none": 0.006916836113835223, + "alias": "lambada_openai" + }, + "harness|winogrande|0": { + "acc,none": 0.7300710339384373, + "acc_stderr,none": 0.01247643337200261, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.8101851851851852, + "acc_stderr,none": 0.008046840527852222, + "acc_norm,none": 0.7824074074074074, + "acc_norm_stderr,none": 0.00846655478907377, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "crusoeai/Llama-3-8B-Instruct-Gradient-1048k-GGUF", + "revision": "main", + "private": false, + "params": null, + "architectures": "?", + 
"quant_type": "llama.cpp", + "precision": "4bit", + "model_params": null, + "model_size": null, + "weight_dtype": "int4", + "compute_dtype": "int8", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-11T17:37:21Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + 
"mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715704089.1980343, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=crusoeai/Llama-3-8B-Instruct-Gradient-1048k-GGUF,ftype=*Q4_0.gguf,dtype=int8,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/cstr/results_2024-05-14-16-26-20.json b/cstr/results_2024-05-14-16-26-20.json new file mode 100644 index 0000000000000000000000000000000000000000..a5225fcab839cd9361c4264db8c7c6ec5b3e3240 --- /dev/null +++ b/cstr/results_2024-05-14-16-26-20.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-14-16-26-20", + "total_evaluation_time_secondes": "", + "model_name": "cstr/Spaetzle-v60-7b-Q4_0-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.108181504, + "model_params": 7.241732096, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.7916337805840569, + "acc_stderr,none": 0.011414554399987729, + "alias": "winogrande" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.300725774930302, + "perplexity_stderr,none": 0.10340175485197198, + "acc,none": 0.5278478556180866, + "acc_stderr,none": 0.006955164991410334, + "alias": "lambada_openai" + }, + "harness|mmlu|0": { + "acc,none": 0.6204956558894744, + 
"acc_stderr,none": 0.003843529262430882, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5687566418703507, + "acc_stderr,none": 0.006678320920662939 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.42857142857142855, + "acc_stderr,none": 0.0442626668137991 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7757575757575758, + "acc_stderr,none": 0.03256866661681102 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8333333333333334, + "acc_stderr,none": 0.02615686752393106 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8270042194092827, + "acc_stderr,none": 0.024621562866768445 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8016528925619835, + "acc_stderr,none": 0.03640118271990947 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.04077494709252627 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7791411042944786, + "acc_stderr,none": 0.032591773927421776 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6907514450867052, + "acc_stderr,none": 0.024883140570071748 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.29497206703910617, + "acc_stderr,none": 0.01525193157920817 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6881028938906752, + "acc_stderr,none": 0.026311858071854155 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7469135802469136, + "acc_stderr,none": 0.024191808600713002 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.47131681877444587, + "acc_stderr,none": 0.012749206007657473 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8128654970760234, + "acc_stderr,none": 0.029913127232368036 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7009977470228517, + "acc_stderr,none": 0.007921254317513629 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.59, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7018867924528301, + "acc_stderr,none": 0.02815283794249387 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6473988439306358, + "acc_stderr,none": 0.036430371689585475 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.7174887892376681, + "acc_stderr,none": 0.030216831011508752 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8155339805825242, + "acc_stderr,none": 0.03840423627288276 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8461538461538461, + "acc_stderr,none": 0.023636873317489274 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - 
miscellaneous", + "acc,none": 0.8250319284802043, + "acc_stderr,none": 0.013586619219903355 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6928104575163399, + "acc_stderr,none": 0.026415601914389 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5106382978723404, + "acc_stderr,none": 0.02982074719142244 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6764705882352942, + "acc_stderr,none": 0.02841820861940675 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5240963855421686, + "acc_stderr,none": 0.03887971849597264 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7198570035749107, + "acc_stderr,none": 0.0078831118772103 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.38596491228070173, + "acc_stderr,none": 0.04579639422070435 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7575757575757576, + "acc_stderr,none": 0.030532892233932036 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8601036269430051, + "acc_stderr,none": 0.025033870583015174 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6538461538461539, + "acc_stderr,none": 0.024121125416941197 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.634453781512605, + "acc_stderr,none": 0.031282177063684614 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.818348623853211, + "acc_stderr,none": 0.016530617409266868 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7709923664122137, + "acc_stderr,none": 0.036853466317118506 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6437908496732027, + "acc_stderr,none": 0.019373332420724493 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6545454545454545, + "acc_stderr,none": 0.04554619617541054 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.746938775510204, + "acc_stderr,none": 0.027833023871399697 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.845771144278607, + "acc_stderr,none": 0.025538433368578334 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.0377525168068637 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5214081826831589, + "acc_stderr,none": 0.008577657698658724 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.562962962962963, + "acc_stderr,none": 0.042849586397534 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6907894736842105, + "acc_stderr,none": 0.037610708698674805 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7152777777777778, + "acc_stderr,none": 0.03773809990686934 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 
0.39, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.5, + "acc_stderr,none": 0.050251890762960605 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.43137254901960786, + "acc_stderr,none": 0.04928099597287534 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5702127659574469, + "acc_stderr,none": 0.03236214467715563 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5586206896551724, + "acc_stderr,none": 0.04137931034482758 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.42328042328042326, + "acc_stderr,none": 0.025446365634406783 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7548387096774194, + "acc_stderr,none": 0.024472243840895535 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5123152709359606, + "acc_stderr,none": 0.035169204442208966 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.36666666666666664, + "acc_stderr,none": 0.02938162072646507 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.31125827814569534, + "acc_stderr,none": 0.03780445850526733 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4861111111111111, + "acc_stderr,none": 0.03408655867977748 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.49107142857142855, + "acc_stderr,none": 0.04745033255489123 + }, + "harness|piqa|0": { + "acc,none": 0.8171926006528836, + "acc_stderr,none": 0.009017881410505947, + "acc_norm,none": 0.8291621327529923, + "acc_norm_stderr,none": 0.008781267808413661, + "alias": "piqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.8518518518518519, + "acc_stderr,none": 0.007289510108394618, + "acc_norm,none": 0.8244949494949495, + "acc_norm_stderr,none": 0.007805612764705387, + "alias": "arc_easy" + }, + "harness|hellaswag|0": { + "acc,none": 0.6654052977494523, + "acc_stderr,none": 0.004708842600177434, + "acc_norm,none": 0.8477394941246763, + "acc_norm_stderr,none": 0.0035853896364722746, + "alias": "hellaswag" + }, + "harness|boolq|0": { + "acc,none": 0.8798165137614679, + "acc_stderr,none": 0.005687363587870172, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.406, + "acc_stderr,none": 0.021983962090086337, + "acc_norm,none": 0.502, + "acc_norm_stderr,none": 0.02238289498648353, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6247129291773563, + "acc_stderr,none": 0.015270850982333179, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.4700122399020808, + "acc_stderr,none": 0.017471992091697544, + "alias": "truthfulqa_mc1" + }, + "harness|arc:challenge|0": { + 
"acc,none": 0.613481228668942, + "acc_stderr,none": 0.014230084761910467, + "acc_norm,none": 0.6313993174061433, + "acc_norm_stderr,none": 0.014097810678042203, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "cstr/Spaetzle-v60-7b-Q4_0-GGUF", + "revision": "main", + "private": false, + "params": null, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": null, + "model_size": null, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-11T07:32:05Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|piqa|0": 1.0, + 
"harness|arc:easy|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715637391.0355272, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=cstr/Spaetzle-v60-7b-Q4_0-GGUF,ftype=*Q4_0.gguf,dtype=?,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/cstr/results_2024-05-14-20-19-29.json b/cstr/results_2024-05-14-20-19-29.json new file mode 100644 index 0000000000000000000000000000000000000000..4d10d6cc48dee19c8b2920df5b94475db6c34b1f --- /dev/null +++ b/cstr/results_2024-05-14-20-19-29.json @@ -0,0 +1,595 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-14-20-19-29", + "total_evaluation_time_secondes": "", + "model_name": "cstr/Spaetzle-v60-7b-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.16, + "model_params": 7.04, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.6643098984266083, + "acc_stderr,none": 0.004712660409846858, + "acc_norm,none": 
0.8451503684524995, + "acc_norm_stderr,none": 0.0036102194130613477, + "alias": "hellaswag" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.439016746061463, + "perplexity_stderr,none": 0.07651117989296828, + "acc,none": 0.7058024451775665, + "acc_stderr,none": 0.00634853007817727, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6556633657896446, + "acc_stderr,none": 0.015137766174633594, + "alias": "truthfulqa_mc2" + }, + "harness|boolq|0": { + "acc,none": 0.8733944954128441, + "acc_stderr,none": 0.005815995464335388, + "alias": "boolq" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.5018359853121175, + "acc_stderr,none": 0.01750338304687704, + "alias": "truthfulqa_mc1" + }, + "harness|arc:easy|0": { + "acc,none": 0.8526936026936027, + "acc_stderr,none": 0.007272362176697239, + "acc_norm,none": 0.8198653198653199, + "acc_norm_stderr,none": 0.007885661261794777, + "alias": "arc_easy" + }, + "harness|winogrande|0": { + "acc,none": 0.7750591949486977, + "acc_stderr,none": 0.011735043564126735, + "alias": "winogrande" + }, + "harness|openbookqa|0": { + "acc,none": 0.37, + "acc_stderr,none": 0.02161328916516578, + "acc_norm,none": 0.48, + "acc_norm_stderr,none": 0.02236516042423134, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.8226332970620239, + "acc_stderr,none": 0.00891219356474512, + "acc_norm,none": 0.8302502720348205, + "acc_norm_stderr,none": 0.008758999037429159, + "alias": "piqa" + }, + "harness|mmlu|0": { + "acc,none": 0.6139438826378009, + "acc_stderr,none": 0.003851602203891664, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5602550478214665, + "acc_stderr,none": 0.006683438465825116 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4444444444444444, + "acc_stderr,none": 0.04444444444444449 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7636363636363637, + "acc_stderr,none": 0.033175059300091805 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8284313725490197, + "acc_stderr,none": 0.02646056956124065 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8227848101265823, + "acc_stderr,none": 0.024856364184503234 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7851239669421488, + "acc_stderr,none": 0.037494924487096966 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.04077494709252627 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7361963190184049, + "acc_stderr,none": 0.034624199316156234 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6994219653179191, + "acc_stderr,none": 0.024685316867257796 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2871508379888268, + "acc_stderr,none": 0.015131608849963766 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.7041800643086816, + "acc_stderr,none": 0.02592237178881879 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7376543209876543, + "acc_stderr,none": 0.024477222856135114 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 
0.45436766623207303, + "acc_stderr,none": 0.012716941720734808 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8070175438596491, + "acc_stderr,none": 0.030267457554898465 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6942388155777277, + "acc_stderr,none": 0.007979014705714821 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.61, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6943396226415094, + "acc_stderr,none": 0.028353298073322663 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6242774566473989, + "acc_stderr,none": 0.036928207672648664 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.7040358744394619, + "acc_stderr,none": 0.030636591348699796 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8058252427184466, + "acc_stderr,none": 0.03916667762822584 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8547008547008547, + "acc_stderr,none": 0.02308663508684141 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.72, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8160919540229885, + "acc_stderr,none": 0.01385372417092253 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6862745098039216, + "acc_stderr,none": 0.026568921015457155 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4929078014184397, + "acc_stderr,none": 0.02982449855912901 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6691176470588235, + "acc_stderr,none": 0.028582709753898435 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7179070523236919, + "acc_stderr,none": 0.00793343808995908 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.49122807017543857, + "acc_stderr,none": 0.04702880432049615 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7525252525252525, + "acc_stderr,none": 0.0307463007421245 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8756476683937824, + "acc_stderr,none": 0.023814477086593577 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6205128205128205, + "acc_stderr,none": 0.024603626924097417 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6386554621848739, + "acc_stderr,none": 0.031204691225150023 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8091743119266055, + "acc_stderr,none": 0.016847676400091112 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7786259541984732, + "acc_stderr,none": 
0.03641297081313729 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6454248366013072, + "acc_stderr,none": 0.01935336054755369 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6818181818181818, + "acc_stderr,none": 0.04461272175910508 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7061224489795919, + "acc_stderr,none": 0.02916273841024977 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8407960199004975, + "acc_stderr,none": 0.025870646766169136 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.86, + "acc_stderr,none": 0.03487350880197771 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5134792261338408, + "acc_stderr,none": 0.008544368645395404 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.29, + "acc_stderr,none": 0.045604802157206824 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.562962962962963, + "acc_stderr,none": 0.042849586397534 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6776315789473685, + "acc_stderr,none": 0.038035102483515854 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7291666666666666, + "acc_stderr,none": 0.03716177437566017 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.56, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.29, + "acc_stderr,none": 0.04560480215720684 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4215686274509804, + "acc_stderr,none": 0.049135952012744975 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.73, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5531914893617021, + "acc_stderr,none": 0.0325005368436584 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5241379310344828, + "acc_stderr,none": 0.0416180850350153 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4126984126984127, + "acc_stderr,none": 0.025355741263055263 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7548387096774194, + "acc_stderr,none": 0.02447224384089554 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.46798029556650245, + "acc_stderr,none": 0.03510766597959214 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621505 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34444444444444444, + "acc_stderr,none": 0.02897264888484427 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.33112582781456956, + "acc_stderr,none": 0.038425817186598696 + }, + "harness|mmlu_high_school_statistics|0": { 
+ "alias": " - high_school_statistics", + "acc,none": 0.4675925925925926, + "acc_stderr,none": 0.03402801581358966 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5267857142857143, + "acc_stderr,none": 0.047389751192741546 + }, + "harness|arc:challenge|0": { + "acc,none": 0.621160409556314, + "acc_stderr,none": 0.014175915490000328, + "acc_norm,none": 0.6390784982935154, + "acc_norm_stderr,none": 0.01403476138617546, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "cstr/Spaetzle-v60-7b-int4-inc", + "revision": "main", + "private": false, + "params": 4.16, + "architectures": "MistralForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7.04, + "model_size": 4.16, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-11T11:55:16Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.11", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 200, + "lr": 0.005, + "minmax_lr": 0.005, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float32", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + 
"harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715684444.5114553, + "config": { + "model": "hf", + "model_args": "pretrained=cstr/Spaetzle-v60-7b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/cstr/results_2024-05-18-23-17-16.json b/cstr/results_2024-05-18-23-17-16.json new file mode 100644 index 
0000000000000000000000000000000000000000..f14590bb1057c7f7306507cf60522a49e4a0152f --- /dev/null +++ b/cstr/results_2024-05-18-23-17-16.json @@ -0,0 +1,595 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-18-23-17-16", + "total_evaluation_time_secondes": "", + "model_name": "cstr/llama3-8b-spaetzle-v20-int4-inc", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.74, + "model_params": 7.04, + "quant_type": "AutoRound", + "precision": "4bit" + }, + "results": { + "harness|arc:challenge|0": { + "acc,none": 0.6177474402730375, + "acc_stderr,none": 0.014200454049979275, + "acc_norm,none": 0.6450511945392492, + "acc_norm_stderr,none": 0.01398303690409409, + "alias": "arc_challenge" + }, + "harness|boolq|0": { + "acc,none": 0.8275229357798165, + "acc_stderr,none": 0.0066076683238905365, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.6417177040307649, + "acc_stderr,none": 0.003795776014125404, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5793836344314559, + "acc_stderr,none": 0.006670334869390845 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.5238095238095238, + "acc_stderr,none": 0.04467062628403273 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7212121212121212, + "acc_stderr,none": 0.03501438706296781 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8284313725490197, + "acc_stderr,none": 0.026460569561240658 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8523206751054853, + "acc_stderr,none": 0.023094329582595673 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7520661157024794, + "acc_stderr,none": 0.03941897526516302 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.0401910747255735 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7975460122699386, + "acc_stderr,none": 0.03157065078911899 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7109826589595376, + "acc_stderr,none": 0.02440517393578323 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2905027932960894, + "acc_stderr,none": 0.015183844307206151 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.7202572347266881, + "acc_stderr,none": 0.025494259350694902 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7438271604938271, + "acc_stderr,none": 0.024288533637726095 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4941329856584094, + "acc_stderr,none": 0.012769356925216526 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7953216374269005, + "acc_stderr,none": 0.030944459778533204 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.718056002574831, + "acc_stderr,none": 0.0077761783288632785 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + 
"harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7320754716981132, + "acc_stderr,none": 0.027257260322494845 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.653179190751445, + "acc_stderr,none": 0.036291466701596636 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.39, + "acc_stderr,none": 0.04902071300001974 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6995515695067265, + "acc_stderr,none": 0.030769352008229136 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8543689320388349, + "acc_stderr,none": 0.03492606476623789 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8717948717948718, + "acc_stderr,none": 0.021901905115073332 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.8, + "acc_stderr,none": 0.04020151261036843 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8237547892720306, + "acc_stderr,none": 0.013625556907993478 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7058823529411765, + "acc_stderr,none": 0.02609016250427904 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5177304964539007, + "acc_stderr,none": 0.02980873964223777 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7352941176470589, + "acc_stderr,none": 0.026799562024887678 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5060240963855421, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7539811504712383, + "acc_stderr,none": 0.00760262086971358 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.5263157894736842, + "acc_stderr,none": 0.046970851366478626 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.803030303030303, + "acc_stderr,none": 0.02833560973246336 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8963730569948186, + "acc_stderr,none": 0.021995311963644248 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6384615384615384, + "acc_stderr,none": 0.024359581465396997 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7563025210084033, + "acc_stderr,none": 0.027886828078380565 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8458715596330275, + "acc_stderr,none": 0.01548082686537429 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7709923664122137, + "acc_stderr,none": 0.036853466317118506 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6911764705882353, + "acc_stderr,none": 0.018690850273595284 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.7181818181818181, + "acc_stderr,none": 0.043091187099464585 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7306122448979592, + "acc_stderr,none": 0.02840125202902294 + }, + 
"harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8557213930348259, + "acc_stderr,none": 0.024845753212306053 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.84, + "acc_stderr,none": 0.03684529491774708 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5499524262607041, + "acc_stderr,none": 0.008539669749142128 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6592592592592592, + "acc_stderr,none": 0.04094376269996793 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6973684210526315, + "acc_stderr,none": 0.03738520676119668 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7708333333333334, + "acc_stderr,none": 0.035146974678623884 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.43, + "acc_stderr,none": 0.04975698519562428 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.49, + "acc_stderr,none": 0.05024183937956912 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.37, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.45098039215686275, + "acc_stderr,none": 0.04951218252396262 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.74, + "acc_stderr,none": 0.04408440022768078 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5531914893617021, + "acc_stderr,none": 0.0325005368436584 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6068965517241379, + "acc_stderr,none": 0.0407032901370707 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.47883597883597884, + "acc_stderr,none": 0.025728230952130726 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7677419354838709, + "acc_stderr,none": 0.02402225613030824 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4827586206896552, + "acc_stderr,none": 0.035158955511657 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.73, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.4, + "acc_stderr,none": 0.029869605095316904 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.37748344370860926, + "acc_stderr,none": 0.0395802723112157 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5462962962962963, + "acc_stderr,none": 0.03395322726375797 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5178571428571429, + "acc_stderr,none": 0.04742762361243011 + }, + "harness|hellaswag|0": { + "acc,none": 0.6278629755028878, + "acc_stderr,none": 0.004823867761332462, + "acc_norm,none": 0.8094005178251344, + "acc_norm_stderr,none": 0.003919711594888583, + "alias": "hellaswag" + }, + "harness|piqa|0": { + 
"acc,none": 0.8041349292709467, + "acc_stderr,none": 0.009259518041395784, + "acc_norm,none": 0.8122959738846572, + "acc_norm_stderr,none": 0.009110440292132567, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6020237507424406, + "acc_stderr,none": 0.015440092322817846, + "alias": "truthfulqa_mc2" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.4320685434516524, + "acc_stderr,none": 0.01734120239498826, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.1369128271654843, + "perplexity_stderr,none": 0.07922627082104675, + "acc,none": 0.7172520861634, + "acc_stderr,none": 0.00627404584097121, + "alias": "lambada_openai" + }, + "harness|openbookqa|0": { + "acc,none": 0.374, + "acc_stderr,none": 0.021660710347204484, + "acc_norm,none": 0.454, + "acc_norm_stderr,none": 0.02228814759117695, + "alias": "openbookqa" + }, + "harness|winogrande|0": { + "acc,none": 0.7466456195737964, + "acc_stderr,none": 0.012223754434233626, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.8539562289562289, + "acc_stderr,none": 0.007246487003204766, + "acc_norm,none": 0.8400673400673401, + "acc_norm_stderr,none": 0.00752131579248775, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "cstr/llama3-8b-spaetzle-v20-int4-inc", + "revision": "main", + "private": false, + "params": 5.74, + "architectures": "LlamaForCausalLM", + "quant_type": "AutoRound", + "precision": "4bit", + "model_params": 7.04, + "model_size": 5.74, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-18T10:08:36Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "autoround_version": "0.11", + "bits": 4, + "damp_percent": 0.01, + "desc_act": false, + "enable_minmax_tuning": true, + "group_size": 128, + "is_marlin_format": false, + "iters": 200, + "lr": 0.005, + "minmax_lr": 0.005, + "model_file_base_name": "model", + "model_name_or_path": null, + "quant_method": "gptq", + "scale_dtype": "torch.float32", + "static_groups": false, + "sym": false, + "true_sequential": false, + "use_quant_input": true + }, + "versions": { + "harness|arc:challenge|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + 
"harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|hellaswag|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + 
"mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1716040661.0656447, + "config": { + "model": "hf", + "model_args": "pretrained=cstr/llama3-8b-spaetzle-v20-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/facebook/opt-1.3b/results_2024-04-10-05-16-22.json b/facebook/opt-1.3b/results_2024-04-10-05-16-22.json deleted file mode 100644 index b5cc82032963a86ba24a3064b085b0fb12a1a2c4..0000000000000000000000000000000000000000 --- a/facebook/opt-1.3b/results_2024-04-10-05-16-22.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "config_general": { - "lighteval_sha": "1.14", - "num_few_shot_default": null, - "num_fewshot_seeds": null, - "override_batch_size": null, - "max_samples": null, - "job_id": 2, - "start_time": "2024-04-10 05:04:42", - "end_time": "2024-04-10-05-16-22", - "total_evaluation_time_secondes": "", - "model_name": "facebook/opt-1.3b", - "model_sha": "", - "model_dtype": "bfloat16", - "model_size": 1.3 - }, - "results": { - "harness|winogrande|0": { - "acc": 0.5951065509076559, - "acc_stderr": 0.013795927003124934 - }, - "harness|arc:easy|0": { - "acc": 0.5707070707070707, - "acc_stderr": 0.010156678075911085, - "acc_norm": 0.5088383838383839, - "acc_norm_stderr": 0.010258180468004831 - }, - "harness|arc:challenge|0": { - "acc": 0.23464163822525597, - "acc_stderr": 0.012383873560768675, - "acc_norm": 0.29692832764505117, - "acc_norm_stderr": 0.013352025976725222 - }, - "harness|truthfulqa:mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752332, - "mc2": 0.38653059858550987, - "mc2_stderr": 0.014234618399973241 - } - }, - "task_info": { - "model": "facebook/opt-1.3b", - "base_model": "", - "revision": "main", - "private": false, - "precision": "bfloat16", - "params": 1.3, - "architectures": "OPTForCausalLM", - "weight_type": "Original", - "status": "FINISHED", - "submitted_time": "2024-04-10T11:23:43Z", - "model_type": "\ud83d\udfe2 : pretrained", - "job_id": 2, - "job_start_time": "2024-04-10 05:04:42" - }, - "versions": { - "harness|winogrande|0": 0, - "harness|arc:easy|0": 0, - "harness|arc:challenge|0": 0, - "harness|truthfulqa:mc|0": 1 - } -} \ No newline at end of file diff --git a/facebook/opt-125m/results_2024-04-10-05-42-53.json b/facebook/opt-125m/results_2024-04-10-05-42-53.json deleted file mode 100644 index fc859ff47b78036380a4b2dfeeacf22dcfa6b7a3..0000000000000000000000000000000000000000 --- a/facebook/opt-125m/results_2024-04-10-05-42-53.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "config_general": { - "lighteval_sha": "1.14", - "num_few_shot_default": null, - "num_fewshot_seeds": null, - "override_batch_size": null, - "max_samples": null, - "job_id": 3, - "start_time": "2024-04-10 05:34:32", - "end_time": "2024-04-10-05-42-53", - "total_evaluation_time_secondes": "", - "model_name": "facebook/opt-125m", - "model_sha": "", - "model_dtype": "bfloat16", - "model_size": 0.125 - }, - "results": { - "harness|winogrande|0": { - "acc": 0.500394632991318, - "acc_stderr": 0.014052481306049516 - }, - "harness|arc:easy|0": { - "acc": 0.4351851851851852, - "acc_stderr": 0.010173216430370904, - "acc_norm": 0.3985690235690236, - "acc_norm_stderr": 0.01004645540047793 - }, - "harness|arc:challenge|0": { - "acc": 
0.19539249146757678, - "acc_stderr": 0.01158690718995291, - "acc_norm": 0.23293515358361774, - "acc_norm_stderr": 0.0123525070426174 - }, - "harness|truthfulqa:mc|0": { - "mc1": 0.23990208078335373, - "mc1_stderr": 0.014948812679062135, - "mc2": 0.4285249783495301, - "mc2_stderr": 0.01506992646587412 - } - }, - "task_info": { - "model": "facebook/opt-125m", - "base_model": "", - "revision": "main", - "private": false, - "precision": "bfloat16", - "params": 0.125, - "architectures": "OPTForCausalLM", - "weight_type": "Original", - "status": "FINISHED", - "submitted_time": "2024-04-10T12:05:21Z", - "model_type": "\ud83d\udfe2 : pretrained", - "job_id": 3, - "job_start_time": "2024-04-10 05:34:32" - }, - "versions": { - "harness|winogrande|0": 0, - "harness|arc:easy|0": 0, - "harness|arc:challenge|0": 0, - "harness|truthfulqa:mc|0": 1 - } -} \ No newline at end of file diff --git a/facebook/opt-350m/results_2024-04-10-06-34-20.json b/facebook/opt-350m/results_2024-04-10-06-34-20.json deleted file mode 100644 index e64974e3ce9c6a4d2a571f2b6ae4774f96a5972f..0000000000000000000000000000000000000000 --- a/facebook/opt-350m/results_2024-04-10-06-34-20.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "config_general": { - "lighteval_sha": "1.14", - "num_few_shot_default": null, - "num_fewshot_seeds": null, - "override_batch_size": null, - "max_samples": null, - "job_id": 4, - "start_time": "2024-04-10 06:13:22", - "end_time": "2024-04-10-06-34-20", - "total_evaluation_time_secondes": "", - "model_name": "facebook/opt-350m", - "model_sha": "", - "model_dtype": "bfloat16", - "model_size": 0.35 - }, - "results": { - "harness|winogrande|0": { - "acc": 0.5272296764009471, - "acc_stderr": 0.014031631629827698 - }, - "harness|arc:easy|0": { - "acc": 0.44107744107744107, - "acc_stderr": 0.01018829322104055, - "acc_norm": 0.4027777777777778, - "acc_norm_stderr": 0.01006396049498916 - }, - "harness|arc:challenge|0": { - "acc": 0.20648464163822525, - "acc_stderr": 0.011828865619002316, - "acc_norm": 0.24232081911262798, - "acc_norm_stderr": 0.012521593295800116 - }, - "harness|truthfulqa:mc|0": { - "mc1": 0.2350061199510404, - "mc1_stderr": 0.014843061507731624, - "mc2": 0.40953888018781043, - "mc2_stderr": 0.014695924210168897 - } - }, - "task_info": { - "model": "facebook/opt-350m", - "base_model": "", - "revision": "main", - "private": false, - "precision": "bfloat16", - "params": 0.35, - "architectures": "OPTForCausalLM", - "weight_type": "Original", - "status": "FINISHED", - "submitted_time": "2024-04-10T13:12:22Z", - "model_type": "\ud83d\udfe2 : pretrained", - "job_id": 4, - "job_start_time": "2024-04-10 06:13:22" - }, - "versions": { - "harness|winogrande|0": 0, - "harness|arc:easy|0": 0, - "harness|arc:challenge|0": 0, - "harness|truthfulqa:mc|0": 1 - } -} \ No newline at end of file diff --git a/facebook/opt-350m/results_2024-04-11-00-53-12.json b/facebook/opt-350m/results_2024-04-11-00-53-12.json deleted file mode 100644 index 36d0c12b0a0916d8680388de318c85bb6171f7c8..0000000000000000000000000000000000000000 --- a/facebook/opt-350m/results_2024-04-11-00-53-12.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "config_general": { - "lighteval_sha": "1.14", - "num_few_shot_default": null, - "num_fewshot_seeds": null, - "override_batch_size": null, - "max_samples": null, - "job_id": 6, - "start_time": "2024-04-11 00:40:02", - "end_time": "2024-04-11-00-53-12", - "total_evaluation_time_secondes": "", - "model_name": "facebook/opt-350m", - "model_sha": "", - "model_dtype": "8bit", - "quant_type": "Rtn", - "weight_dtype": 
"int8", - "model_size": 0.35 - }, - "results": { - "harness|winogrande|0": { - "acc": 0.5272296764009471, - "acc_stderr": 0.014031631629827698 - }, - "harness|arc:easy|0": { - "acc": 0.44065656565656564, - "acc_stderr": 0.010187264635711981, - "acc_norm": 0.4006734006734007, - "acc_norm_stderr": 0.010055304474255554 - }, - "harness|arc:challenge|0": { - "acc": 0.2090443686006826, - "acc_stderr": 0.011882746987406451, - "acc_norm": 0.24146757679180889, - "acc_norm_stderr": 0.012506564839739432 - }, - "harness|truthfulqa:mc|0": { - "mc1": 0.23745410036719705, - "mc1_stderr": 0.014896277441041843, - "mc2": 0.409767382680759, - "mc2_stderr": 0.014709547172142332 - } - }, - "task_info": { - "model": "facebook/opt-350m", - "base_model": "", - "revision": "main", - "private": false, - "precision": "8bit", - "quant_type": "Rtn", - "weight_dtype": "int8", - "params": 0.35, - "architectures": "OPTForCausalLM", - "weight_type": "Original", - "status": "FINISHED", - "submitted_time": "2024-04-11T05:48:05Z", - "model_type": "\ud83d\udfe2 : pretrained", - "job_id": 6, - "job_start_time": "2024-04-11 00:40:02" - }, - "versions": { - "harness|winogrande|0": 0, - "harness|arc:easy|0": 0, - "harness|arc:challenge|0": 0, - "harness|truthfulqa:mc|0": 1 - } -} \ No newline at end of file diff --git a/kaitchup/results_2024-04-30-14-27-42.json b/kaitchup/results_2024-04-30-14-27-42.json new file mode 100644 index 0000000000000000000000000000000000000000..65f1fd060ef6d5919df4f47a7d5d3edfc02d90c0 --- /dev/null +++ b/kaitchup/results_2024-04-30-14-27-42.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-14-27-42", + "total_evaluation_time_secondes": "", + "model_name": "kaitchup/Phi-3-mini-4k-instruct-gptq-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 2.28, + "model_params": 3.66, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5615437016484888, + "acc_stderr,none": 0.01533267530885383, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.5936068512248556, + "acc_stderr,none": 0.004901558132335512, + "acc_norm,none": 0.7747460665206134, + "acc_norm_stderr,none": 0.004168956832201653, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.386, + "acc_stderr,none": 0.021793529219281165, + "acc_norm,none": 0.458, + "acc_norm_stderr,none": 0.022303966774269955, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.36964504283965727, + "acc_stderr,none": 0.016898180706973884, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.8550458715596331, + "acc_stderr,none": 0.006157473358036367, + "alias": "boolq" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5520477815699659, + "acc_stderr,none": 0.014532011498211676, + "acc_norm,none": 0.5708191126279863, + "acc_norm_stderr,none": 0.014464085894870655, + "alias": "arc_challenge" + }, + "harness|mmlu|0": { + "acc,none": 0.6629397521720553, + "acc_stderr,none": 0.0038030181535576306, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.6267800212539851, + "acc_stderr,none": 0.0067741299303627255 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.49206349206349204, + "acc_stderr,none": 0.044715725362943486 + }, + 
"harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.8242424242424242, + "acc_stderr,none": 0.02972094300622445 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.803921568627451, + "acc_stderr,none": 0.027865942286639325 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7932489451476793, + "acc_stderr,none": 0.02636165166838911 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7933884297520661, + "acc_stderr,none": 0.036959801280988254 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.04077494709252627 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7975460122699386, + "acc_stderr,none": 0.031570650789119 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7138728323699421, + "acc_stderr,none": 0.024332146779134114 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.5407821229050279, + "acc_stderr,none": 0.01666678361652578 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.7234726688102894, + "acc_stderr,none": 0.02540383297817963 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7654320987654321, + "acc_stderr,none": 0.023576881744005712 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.48565840938722293, + "acc_stderr,none": 0.012764981829524274 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8245614035087719, + "acc_stderr,none": 0.029170885500727654 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7022851625362085, + "acc_stderr,none": 0.007953873535549752 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.64, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7320754716981132, + "acc_stderr,none": 0.027257260322494845 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6647398843930635, + "acc_stderr,none": 0.03599586301247077 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6591928251121076, + "acc_stderr,none": 0.03181149747055359 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7766990291262136, + "acc_stderr,none": 0.04123553189891431 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8803418803418803, + "acc_stderr,none": 0.02126271940040694 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8033205619412516, + "acc_stderr,none": 0.014214138556913907 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7124183006535948, + "acc_stderr,none": 0.02591780611714716 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5425531914893617, + 
"acc_stderr,none": 0.029719281272236848 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6764705882352942, + "acc_stderr,none": 0.028418208619406752 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5060240963855421, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7734806629834254, + "acc_stderr,none": 0.007386046757088958 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4824561403508772, + "acc_stderr,none": 0.04700708033551038 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8131313131313131, + "acc_stderr,none": 0.027772533334218984 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8704663212435233, + "acc_stderr,none": 0.02423353229775872 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.7102564102564103, + "acc_stderr,none": 0.023000628243687964 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.8025210084033614, + "acc_stderr,none": 0.025859164122051453 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8660550458715597, + "acc_stderr,none": 0.014602811435592635 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.732824427480916, + "acc_stderr,none": 0.03880848301082397 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.7173202614379085, + "acc_stderr,none": 0.018217269552053432 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6454545454545455, + "acc_stderr,none": 0.04582004841505416 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7755102040816326, + "acc_stderr,none": 0.0267114305555384 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8656716417910447, + "acc_stderr,none": 0.024112678240900822 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.86, + "acc_stderr,none": 0.03487350880197771 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5702505550269584, + "acc_stderr,none": 0.00844317204718129 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6296296296296297, + "acc_stderr,none": 0.041716541613545426 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.756578947368421, + "acc_stderr,none": 0.034923496688842384 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.8194444444444444, + "acc_stderr,none": 0.032166008088022675 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.44, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.56, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + 
"harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4215686274509804, + "acc_stderr,none": 0.04913595201274498 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5702127659574469, + "acc_stderr,none": 0.03236214467715564 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6068965517241379, + "acc_stderr,none": 0.040703290137070705 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.5343915343915344, + "acc_stderr,none": 0.02569032176249384 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.8129032258064516, + "acc_stderr,none": 0.022185710092252252 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5812807881773399, + "acc_stderr,none": 0.034711928605184676 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.69, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34444444444444444, + "acc_stderr,none": 0.028972648884844267 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.4105960264900662, + "acc_stderr,none": 0.04016689594849928 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5462962962962963, + "acc_stderr,none": 0.03395322726375797 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5267857142857143, + "acc_stderr,none": 0.047389751192741546 + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.617501632008537, + "perplexity_stderr,none": 0.11277909829460228, + "acc,none": 0.6697069668154473, + "acc_stderr,none": 0.0065524571249181846, + "alias": "lambada_openai" + }, + "harness|piqa|0": { + "acc,none": 0.7845484221980413, + "acc_stderr,none": 0.009592463115658093, + "acc_norm,none": 0.7883569096844396, + "acc_norm_stderr,none": 0.009530351270479399, + "alias": "piqa" + }, + "harness|winogrande|0": { + "acc,none": 0.728492501973165, + "acc_stderr,none": 0.012499326254893127, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.8177609427609428, + "acc_stderr,none": 0.007921402939423302, + "acc_norm,none": 0.79503367003367, + "acc_norm_stderr,none": 0.008283277600626407, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "kaitchup/Phi-3-mini-4k-instruct-gptq-4bit", + "revision": "main", + "private": false, + "params": 2.736, + "architectures": "Phi3ForCausalLM", + "quant_type": "GPTQ", + "precision": "4bit", + "model_params": 5.472, + "model_size": 2.736, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-29T07:26:45Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "damp_percent": 0.1, + "dataset": "c4", + "desc_act": false, + "group_size": 128, + "modules_in_block_to_quantize": null, + "quant_method": "gptq", + "sym": true, + "true_sequential": true + }, + "versions": { + "harness|truthfulqa:mc2|0": 2.0, + 
"harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|lambada:openai|0": 1.0, + "harness|piqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + 
"mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714455514.1273313, + "config": { + "model": "hf", + "model_args": "pretrained=kaitchup/Phi-3-mini-4k-instruct-gptq-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/kreas/results_2024-05-15-00-27-07.json b/kreas/results_2024-05-15-00-27-07.json new file mode 100644 index 0000000000000000000000000000000000000000..e2a27ec586efb6014cdeec6b699ace99b3373b61 --- /dev/null +++ b/kreas/results_2024-05-15-00-27-07.json @@ -0,0 +1,599 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-15-00-27-07", + "total_evaluation_time_secondes": "", + "model_name": "kreas/Mistral-7B-v0.1-GPTQ-2bit", + "model_sha": "", + "model_dtype": "2bit", + "model_size": 2.4, + "model_params": 7, + "quant_type": "GPTQ", + "precision": "2bit" + }, + "results": { + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2521419828641371, + "acc_stderr,none": 0.015201522246299965, + "alias": "truthfulqa_mc1" + }, + "harness|winogrande|0": { + "acc,none": 0.494869771112865, + "acc_stderr,none": 0.014051745961790513, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.2916666666666667, + "acc_stderr,none": 0.009326752065621158, + "acc_norm,none": 0.2962962962962963, + "acc_norm_stderr,none": 0.009369711585684308, + "alias": "arc_easy" + }, + "harness|lambada:openai|0": { + "perplexity,none": 32349.9573081135, + "perplexity_stderr,none": 2095.2762937765037, + "acc,none": 0.021540849990296915, + "acc_stderr,none": 0.0020226225294743745, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.4434250764525994, + "acc_stderr,none": 0.008688893661318225, + "alias": "boolq" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4652237988680654, + "acc_stderr,none": 0.015979473562610813, + 
"alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.20477815699658702, + "acc_stderr,none": 0.01179254433851342, + "acc_norm,none": 0.25853242320819114, + "acc_norm_stderr,none": 0.012794553754288684, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.12, + "acc_stderr,none": 0.014547276256845676, + "acc_norm,none": 0.254, + "acc_norm_stderr,none": 0.01948659680164337, + "alias": "openbookqa" + }, + "harness|mmlu|0": { + "acc,none": 0.241347386412192, + "acc_stderr,none": 0.00360771390508836, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.2463336875664187, + "acc_stderr,none": 0.0062803360384809815 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.25396825396825395, + "acc_stderr,none": 0.03893259610604673 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.20606060606060606, + "acc_stderr,none": 0.0315841532404771 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.24509803921568626, + "acc_stderr,none": 0.030190282453501967 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.2911392405063291, + "acc_stderr,none": 0.029571601065753374 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.23140495867768596, + "acc_stderr,none": 0.03849856098794089 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.32407407407407407, + "acc_stderr,none": 0.04524596007030049 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.22085889570552147, + "acc_stderr,none": 0.03259177392742178 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.25722543352601157, + "acc_stderr,none": 0.023532925431044297 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2569832402234637, + "acc_stderr,none": 0.014614465821966356 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.2090032154340836, + "acc_stderr,none": 0.02309314039837422 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.22839506172839505, + "acc_stderr,none": 0.023358211840626267 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.23859191655801826, + "acc_stderr,none": 0.010885929742002205 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.2982456140350877, + "acc_stderr,none": 0.035087719298245654 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.25523012552301255, + "acc_stderr,none": 0.007826615150571035 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.26037735849056604, + "acc_stderr,none": 0.02700876609070809 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.2543352601156069, + "acc_stderr,none": 0.0332055644308557 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.24, + "acc_stderr,none": 0.04292346959909283 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.2825112107623318, + 
"acc_stderr,none": 0.03021683101150878 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.1941747572815534, + "acc_stderr,none": 0.03916667762822583 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.26495726495726496, + "acc_stderr,none": 0.028911208802749482 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.2681992337164751, + "acc_stderr,none": 0.015842430835269445 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.21241830065359477, + "acc_stderr,none": 0.023420375478296132 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.24822695035460993, + "acc_stderr,none": 0.025770015644290406 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.23529411764705882, + "acc_stderr,none": 0.025767252010855984 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.29518072289156627, + "acc_stderr,none": 0.0355092018568963 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.22684432889177772, + "acc_stderr,none": 0.007551233956089617 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.21929824561403508, + "acc_stderr,none": 0.038924311065187525 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.20707070707070707, + "acc_stderr,none": 0.02886977846026704 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.23834196891191708, + "acc_stderr,none": 0.030748905363909902 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.2282051282051282, + "acc_stderr,none": 0.021278393863586275 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.24369747899159663, + "acc_stderr,none": 0.027886828078380554 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.1889908256880734, + "acc_stderr,none": 0.01678548115920362 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2824427480916031, + "acc_stderr,none": 0.03948406125768361 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.24673202614379086, + "acc_stderr,none": 0.017440820367402507 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.22727272727272727, + "acc_stderr,none": 0.040139645540727735 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.2, + "acc_stderr,none": 0.025607375986579153 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.24875621890547264, + "acc_stderr,none": 0.03056767593891672 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.24, + "acc_stderr,none": 0.04292346959909282 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.23437995559784333, + "acc_stderr,none": 0.007518994303407856 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.2, + "acc_stderr,none": 
0.04020151261036844 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.17777777777777778, + "acc_stderr,none": 0.03302789859901718 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.17105263157894737, + "acc_stderr,none": 0.030643607071677088 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2638888888888889, + "acc_stderr,none": 0.03685651095897532 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.23, + "acc_stderr,none": 0.04229525846816506 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.26, + "acc_stderr,none": 0.04408440022768079 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.04690650298201942 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.2425531914893617, + "acc_stderr,none": 0.028020226271200214 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.33793103448275863, + "acc_stderr,none": 0.039417076320648906 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.20899470899470898, + "acc_stderr,none": 0.020940481565334842 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.20967741935483872, + "acc_stderr,none": 0.02315787934908353 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.21674876847290642, + "acc_stderr,none": 0.02899033125251624 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.26, + "acc_stderr,none": 0.04408440022768078 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.1925925925925926, + "acc_stderr,none": 0.02404307518194519 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2582781456953642, + "acc_stderr,none": 0.035737053147634576 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.19907407407407407, + "acc_stderr,none": 0.027232298462690225 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3392857142857143, + "acc_stderr,none": 0.04493949068613539 + }, + "harness|hellaswag|0": { + "acc,none": 0.2787293367855009, + "acc_stderr,none": 0.004474577054517459, + "acc_norm,none": 0.2953594901414061, + "acc_norm_stderr,none": 0.004552718360513102, + "alias": "hellaswag" + }, + "harness|piqa|0": { + "acc,none": 0.544069640914037, + "acc_stderr,none": 0.011620422647622235, + "acc_norm,none": 0.5402611534276387, + "acc_norm_stderr,none": 0.011627942981817168, + "alias": "piqa" + } + }, + "task_info": { + "model": "kreas/Mistral-7B-v0.1-GPTQ-2bit", + "revision": "main", + "private": false, + "params": 7, + "architectures": "MistralForCausalLM", + "quant_type": "GPTQ", + "precision": "2bit", + "model_params": 7, + "model_size": 2.4, + "weight_dtype": "int2", + "compute_dtype": "float16", + 
"gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-13T11:54:45Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "batch_size": 1, + "bits": 2, + "block_name_to_quantize": null, + "cache_block_outputs": true, + "damp_percent": 0.1, + "dataset": "wikitext2", + "desc_act": false, + "exllama_config": { + "version": 1 + }, + "group_size": 128, + "max_input_length": null, + "model_seqlen": null, + "module_name_preceding_first_block": null, + "modules_in_block_to_quantize": null, + "pad_token_id": null, + "quant_method": "gptq", + "sym": true, + "tokenizer": null, + "true_sequential": true, + "use_cuda_fp16": false, + "use_exllama": true + }, + "versions": { + "harness|truthfulqa:mc1|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + 
"harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|hellaswag|0": 1.0, + "harness|piqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715693601.6570477, + "config": { + "model": "hf", + "model_args": "pretrained=kreas/Mistral-7B-v0.1-GPTQ-2bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/kreas/results_2024-05-15-09-05-02.json b/kreas/results_2024-05-15-09-05-02.json new file mode 100644 index 0000000000000000000000000000000000000000..dd40755ad2aa80a70f4696804a0e707c886067b2 --- /dev/null +++ b/kreas/results_2024-05-15-09-05-02.json @@ -0,0 +1,599 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-15-09-05-02", + "total_evaluation_time_secondes": "", + "model_name": "kreas/Llama-2-7b-hf-GPTQ-3bit", + "model_sha": "", + "model_dtype": "3bit", + "model_size": 3.08, + "model_params": 7, + "quant_type": "GPTQ", + "precision": "3bit" + }, + "results": { + "harness|truthfulqa:mc1|0": { + "acc,none": 0.24112607099143207, + "acc_stderr,none": 0.014974827279752337, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + 
"acc,none": 0.7195718654434251, + "acc_stderr,none": 0.007856704488285309, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.750816104461371, + "acc_stderr,none": 0.01009188277012022, + "acc_norm,none": 0.7595212187159956, + "acc_norm_stderr,none": 0.009971345364651073, + "alias": "piqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.36006825938566556, + "acc_stderr,none": 0.014027516814585183, + "acc_norm,none": 0.40102389078498296, + "acc_norm_stderr,none": 0.014322255790719864, + "alias": "arc_challenge" + }, + "harness|mmlu|0": { + "acc,none": 0.292693348525851, + "acc_stderr,none": 0.0038263905446511304, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.287778958554729, + "acc_stderr,none": 0.00658867562852317 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.29365079365079366, + "acc_stderr,none": 0.04073524322147126 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.24848484848484848, + "acc_stderr,none": 0.03374402644139404 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.03308611113236435 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.31645569620253167, + "acc_stderr,none": 0.030274974880218977 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.3305785123966942, + "acc_stderr,none": 0.04294340845212094 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.3333333333333333, + "acc_stderr,none": 0.04557239513497751 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.294478527607362, + "acc_stderr,none": 0.03581165790474082 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.28901734104046245, + "acc_stderr,none": 0.024405173935783238 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23910614525139665, + "acc_stderr,none": 0.014265554192331154 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.26688102893890675, + "acc_stderr,none": 0.025122637608816643 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.30864197530864196, + "acc_stderr,none": 0.02570264026060375 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.28878748370273793, + "acc_stderr,none": 0.011574914757219954 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.40350877192982454, + "acc_stderr,none": 0.03762738699917055 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.321853878339234, + "acc_stderr,none": 0.008352513591347591 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.3018867924528302, + "acc_stderr,none": 0.028254200344438665 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.2543352601156069, + "acc_stderr,none": 0.0332055644308557 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621505 + }, + "harness|mmlu_human_aging|0": { 
+ "alias": " - human_aging", + "acc,none": 0.34977578475336324, + "acc_stderr,none": 0.03200736719484503 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.2912621359223301, + "acc_stderr,none": 0.044986763205729224 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.3803418803418803, + "acc_stderr,none": 0.031804252043840985 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.3665389527458493, + "acc_stderr,none": 0.017231244626797048 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.3235294117647059, + "acc_stderr,none": 0.026787453111906532 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.25886524822695034, + "acc_stderr,none": 0.026129572527180848 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.2426470588235294, + "acc_stderr,none": 0.02604066247420126 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.2891566265060241, + "acc_stderr,none": 0.03529486801511115 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.2963925901852454, + "acc_stderr,none": 0.008200975671248108 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.22807017543859648, + "acc_stderr,none": 0.03947152782669415 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.23232323232323232, + "acc_stderr,none": 0.030088629490217483 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.3160621761658031, + "acc_stderr,none": 0.033553973696861736 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.26666666666666666, + "acc_stderr,none": 0.022421273612923714 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.2647058823529412, + "acc_stderr,none": 0.028657491285071963 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.30642201834862387, + "acc_stderr,none": 0.01976551722045852 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2900763358778626, + "acc_stderr,none": 0.03980066246467765 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.33986928104575165, + "acc_stderr,none": 0.019162418588623564 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.33636363636363636, + "acc_stderr,none": 0.04525393596302506 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.21224489795918366, + "acc_stderr,none": 0.026176967197866767 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.3283582089552239, + "acc_stderr,none": 0.033206858897443244 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.44, + "acc_stderr,none": 0.049888765156985884 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.26768157310497936, + "acc_stderr,none": 0.007869789861837485 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - 
abstract_algebra", + "acc,none": 0.22, + "acc_stderr,none": 0.04163331998932269 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.31851851851851853, + "acc_stderr,none": 0.040247784019771096 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.3355263157894737, + "acc_stderr,none": 0.03842498559395268 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.3194444444444444, + "acc_stderr,none": 0.03899073687357336 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.17, + "acc_stderr,none": 0.0377525168068637 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.24, + "acc_stderr,none": 0.04292346959909281 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.22549019607843138, + "acc_stderr,none": 0.041583075330832865 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.34, + "acc_stderr,none": 0.047609522856952344 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.31063829787234043, + "acc_stderr,none": 0.03025123757921317 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.27586206896551724, + "acc_stderr,none": 0.03724563619774632 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2328042328042328, + "acc_stderr,none": 0.02176596167215453 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.2903225806451613, + "acc_stderr,none": 0.0258221061194159 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.2413793103448276, + "acc_stderr,none": 0.030108330718011625 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.23, + "acc_stderr,none": 0.04229525846816506 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.23703703703703705, + "acc_stderr,none": 0.025928876132766135 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2847682119205298, + "acc_stderr,none": 0.03684881521389023 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.2175925925925926, + "acc_stderr,none": 0.02813968944485968 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3125, + "acc_stderr,none": 0.043994650575715215 + }, + "harness|openbookqa|0": { + "acc,none": 0.262, + "acc_stderr,none": 0.019684688820194716, + "acc_norm,none": 0.4, + "acc_norm_stderr,none": 0.0219308441207285, + "alias": "openbookqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 5.524690677068936, + "perplexity_stderr,none": 0.12491851399272051, + "acc,none": 0.6372986609741897, + "acc_stderr,none": 0.006698200684488456, + "alias": "lambada_openai" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.38574848458989336, + "acc_stderr,none": 0.01351754999471895, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.5218084047002589, + "acc_stderr,none": 0.004985032806802445, + "acc_norm,none": 
0.7056363274248157, + "acc_norm_stderr,none": 0.004548247487546316, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.664561957379637, + "acc_stderr,none": 0.013269575904851434, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.6957070707070707, + "acc_stderr,none": 0.009441202922359185, + "acc_norm,none": 0.6494107744107744, + "acc_norm_stderr,none": 0.00979100382983156, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "kreas/Llama-2-7b-hf-GPTQ-3bit", + "revision": "main", + "private": false, + "params": 7, + "architectures": "LlamaForCausalLM", + "quant_type": "GPTQ", + "precision": "3bit", + "model_params": 7, + "model_size": 3.08, + "weight_dtype": "int3", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-13T11:54:45Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "batch_size": 1, + "bits": 3, + "block_name_to_quantize": null, + "cache_block_outputs": true, + "damp_percent": 0.1, + "dataset": "wikitext2", + "desc_act": false, + "exllama_config": { + "version": 1 + }, + "group_size": 128, + "max_input_length": null, + "model_seqlen": null, + "module_name_preceding_first_block": null, + "modules_in_block_to_quantize": null, + "pad_token_id": null, + "quant_method": "gptq", + "sym": true, + "tokenizer": null, + "true_sequential": true, + "use_cuda_fp16": false, + "use_exllama": true + }, + "versions": { + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + 
"harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|openbookqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715721484.0391965, + "config": { + "model": "hf", + "model_args": "pretrained=kreas/Llama-2-7b-hf-GPTQ-3bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/leliuga/results_2024-04-30-13-36-35.json b/leliuga/results_2024-04-30-13-36-35.json new file mode 100644 index 
0000000000000000000000000000000000000000..159915513f7a0e87889d8dada74564c7d8fcdcce --- /dev/null +++ b/leliuga/results_2024-04-30-13-36-35.json @@ -0,0 +1,590 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-30-13-36-35", + "total_evaluation_time_secondes": "", + "model_name": "leliuga/Phi-3-mini-4k-instruct-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 2.26, + "model_params": 3.74, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|openbookqa|0": { + "acc,none": 0.38, + "acc_stderr,none": 0.021728881438701716, + "acc_norm,none": 0.454, + "acc_norm_stderr,none": 0.022288147591176945, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5580592365477883, + "acc_stderr,none": 0.015331592955190172, + "alias": "truthfulqa_mc2" + }, + "harness|hellaswag|0": { + "acc,none": 0.5970922127066322, + "acc_stderr,none": 0.004894801119898624, + "acc_norm,none": 0.7801234813782115, + "acc_norm_stderr,none": 0.0041331638053174495, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.37209302325581395, + "acc_stderr,none": 0.016921090118814035, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.7889009793253536, + "acc_stderr,none": 0.009521377378734167, + "acc_norm,none": 0.7932535364526659, + "acc_norm_stderr,none": 0.00944866551418327, + "alias": "piqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.8308080808080808, + "acc_stderr,none": 0.007693223639488826, + "acc_norm,none": 0.8000841750841751, + "acc_norm_stderr,none": 0.008206531105458863, + "alias": "arc_easy" + }, + "harness|mmlu|0": { + "acc,none": 0.6661444238712434, + "acc_stderr,none": 0.0037896788441303046, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.6289054197662062, + "acc_stderr,none": 0.006790926904899143 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.5634920634920635, + "acc_stderr,none": 0.04435932892851466 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7878787878787878, + "acc_stderr,none": 0.03192271569548301 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7745098039215687, + "acc_stderr,none": 0.02933116229425172 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8059071729957806, + "acc_stderr,none": 0.025744902532290934 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8264462809917356, + "acc_stderr,none": 0.0345727283691767 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.043300437496507416 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.8220858895705522, + "acc_stderr,none": 0.03004735765580663 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7254335260115607, + "acc_stderr,none": 0.02402774515526502 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.5094972067039106, + "acc_stderr,none": 0.01671948464334877 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 
0.7170418006430869, + "acc_stderr,none": 0.025583062489984838 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.02346842983245116 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.5091264667535854, + "acc_stderr,none": 0.012768108601640016 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8011695906432749, + "acc_stderr,none": 0.03061111655743253 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7093659478596717, + "acc_stderr,none": 0.007845883201569273 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.67, + "acc_stderr,none": 0.04725815626252609 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7584905660377359, + "acc_stderr,none": 0.026341480371118352 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6589595375722543, + "acc_stderr,none": 0.036146654241808254 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.36, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6771300448430493, + "acc_stderr,none": 0.031381476375754995 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8252427184466019, + "acc_stderr,none": 0.0376017800602662 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8803418803418803, + "acc_stderr,none": 0.02126271940040694 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.72, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8186462324393359, + "acc_stderr,none": 0.013778693778464081 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7058823529411765, + "acc_stderr,none": 0.02609016250427904 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.524822695035461, + "acc_stderr,none": 0.02979071924382972 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6801470588235294, + "acc_stderr,none": 0.02833295951403123 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7757556061098473, + "acc_stderr,none": 0.007371597574029847 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.5087719298245614, + "acc_stderr,none": 0.04702880432049615 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8383838383838383, + "acc_stderr,none": 0.026225919863629293 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8652849740932642, + "acc_stderr,none": 0.024639789097709443 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.7, + "acc_stderr,none": 0.023234581088428487 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7899159663865546, + "acc_stderr,none": 0.026461398717471878 + }, + 
"harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8752293577981651, + "acc_stderr,none": 0.014168298359156352 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7251908396946565, + "acc_stderr,none": 0.039153454088478354 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.7173202614379085, + "acc_stderr,none": 0.018217269552053435 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.7090909090909091, + "acc_stderr,none": 0.04350271442923243 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7755102040816326, + "acc_stderr,none": 0.026711430555538398 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8606965174129353, + "acc_stderr,none": 0.02448448716291397 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.03775251680686371 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5721535045987948, + "acc_stderr,none": 0.008404208733097255 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6296296296296297, + "acc_stderr,none": 0.041716541613545426 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.7368421052631579, + "acc_stderr,none": 0.03583496176361072 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.8194444444444444, + "acc_stderr,none": 0.032166008088022675 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.47, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.47, + "acc_stderr,none": 0.050161355804659205 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.047609522856952344 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.43137254901960786, + "acc_stderr,none": 0.04928099597287534 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.77, + "acc_stderr,none": 0.04229525846816506 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.6340425531914894, + "acc_stderr,none": 0.0314895582974553 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5793103448275863, + "acc_stderr,none": 0.0411391498118926 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4973544973544973, + "acc_stderr,none": 0.02575094967813038 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.832258064516129, + "acc_stderr,none": 0.021255464065371335 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5862068965517241, + "acc_stderr,none": 0.03465304488406795 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - 
high_school_mathematics", + "acc,none": 0.3296296296296296, + "acc_stderr,none": 0.028661201116524586 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.4370860927152318, + "acc_stderr,none": 0.04050035722230636 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5324074074074074, + "acc_stderr,none": 0.034028015813589656 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5357142857142857, + "acc_stderr,none": 0.04733667890053756 + }, + "harness|arc:challenge|0": { + "acc,none": 0.5691126279863481, + "acc_stderr,none": 0.014471133392642475, + "acc_norm,none": 0.5836177474402731, + "acc_norm_stderr,none": 0.014405618279436178, + "alias": "arc_challenge" + }, + "harness|winogrande|0": { + "acc,none": 0.7269139700078927, + "acc_stderr,none": 0.012522020105869456, + "alias": "winogrande" + }, + "harness|boolq|0": { + "acc,none": 0.8602446483180428, + "acc_stderr,none": 0.0060643988004343554, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.367293923780302, + "perplexity_stderr,none": 0.10618058268537012, + "acc,none": 0.6745585096060547, + "acc_stderr,none": 0.006527672323541589, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "leliuga/Phi-3-mini-4k-instruct-bnb-4bit", + "revision": "main", + "private": false, + "params": 8.264, + "architectures": "Phi3ForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 16.528, + "model_size": 8.264, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-29T07:25:56Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "_load_in_4bit": true, + "_load_in_8bit": false, + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_storage": "uint8", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "versions": { + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + 
"harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:challenge|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + 
"mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714442777.7010922, + "config": { + "model": "hf", + "model_args": "pretrained=leliuga/Phi-3-mini-4k-instruct-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/leliuga/results_2024-05-10-12-37-10.json b/leliuga/results_2024-05-10-12-37-10.json new file mode 100644 index 0000000000000000000000000000000000000000..3e55d25fba50eea0bd1e73769789f3771b35c21b --- /dev/null +++ b/leliuga/results_2024-05-10-12-37-10.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-10-12-37-10", + "total_evaluation_time_secondes": "", + "model_name": "leliuga/phi-2-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 1.82, + "model_params": 2.7, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.778563656147987, + "acc_stderr,none": 0.009687616456840253, + "acc_norm,none": 0.7834602829162133, + "acc_norm_stderr,none": 0.009609984714384614, + "alias": "piqa" + }, + "harness|mmlu|0": { + "acc,none": 0.5151687793761572, + "acc_stderr,none": 0.004026508053034632, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.4731137088204038, + "acc_stderr,none": 0.006892448037308947 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3253968253968254, + "acc_stderr,none": 0.041905964388711366 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6424242424242425, + "acc_stderr,none": 0.037425970438065864 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.6176470588235294, + "acc_stderr,none": 0.03410785338904719 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.6877637130801688, + "acc_stderr,none": 0.030165137867847015 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7272727272727273, + "acc_stderr,none": 0.04065578140908705 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6851851851851852, + "acc_stderr,none": 0.04489931073591312 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7239263803680982, + "acc_stderr,none": 0.03512385283705048 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6271676300578035, + "acc_stderr,none": 0.026033890613576277 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.24134078212290502, + "acc_stderr,none": 0.01431099954796145 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5466237942122186, + "acc_stderr,none": 0.028274359854894245 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5833333333333334, + 
"acc_stderr,none": 0.027431623722415015 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.39308996088657105, + "acc_stderr,none": 0.012474899613873968 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.672514619883041, + "acc_stderr,none": 0.035993357714560276 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5709687801738011, + "acc_stderr,none": 0.008615724990192988 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956912 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5962264150943396, + "acc_stderr,none": 0.03019761160019795 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5317919075144508, + "acc_stderr,none": 0.03804749744364764 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.35, + "acc_stderr,none": 0.04793724854411018 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.600896860986547, + "acc_stderr,none": 0.032867453125679603 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6310679611650486, + "acc_stderr,none": 0.04777615181156739 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8076923076923077, + "acc_stderr,none": 0.025819233256483727 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.55, + "acc_stderr,none": 0.05 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.669220945083014, + "acc_stderr,none": 0.016824818462563753 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5849673202614379, + "acc_stderr,none": 0.028213504177824096 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.3971631205673759, + "acc_stderr,none": 0.029189805673587112 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.3786764705882353, + "acc_stderr,none": 0.02946513363977613 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.463855421686747, + "acc_stderr,none": 0.03882310850890594 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6161845953851154, + "acc_stderr,none": 0.008586508465689755 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.3157894736842105, + "acc_stderr,none": 0.04372748290278007 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6818181818181818, + "acc_stderr,none": 0.03318477333845331 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.689119170984456, + "acc_stderr,none": 0.03340361906276587 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5153846153846153, + "acc_stderr,none": 0.02533900301010651 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.5714285714285714, + "acc_stderr,none": 0.032145368597886394 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7302752293577982, + "acc_stderr,none": 0.01902848671111544 + }, + 
"harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6183206106870229, + "acc_stderr,none": 0.0426073515764456 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5310457516339869, + "acc_stderr,none": 0.02018880445636189 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6636363636363637, + "acc_stderr,none": 0.04525393596302505 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6489795918367347, + "acc_stderr,none": 0.030555316755573634 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7263681592039801, + "acc_stderr,none": 0.03152439186555401 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.73, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.42435775451950525, + "acc_stderr,none": 0.008572231891014966 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542126 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.45185185185185184, + "acc_stderr,none": 0.04299268905480864 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5328947368421053, + "acc_stderr,none": 0.040601270352363966 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5347222222222222, + "acc_stderr,none": 0.041711158581816184 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.24509803921568626, + "acc_stderr,none": 0.04280105837364395 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.59, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.4765957446808511, + "acc_stderr,none": 0.03265019475033582 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.4689655172413793, + "acc_stderr,none": 0.04158632762097828 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.3994708994708995, + "acc_stderr,none": 0.025225450284067877 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.632258064516129, + "acc_stderr,none": 0.02743086657997347 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4088669950738916, + "acc_stderr,none": 0.034590588158832314 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.63, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.26666666666666666, + "acc_stderr,none": 0.026962424325073835 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - 
high_school_physics", + "acc,none": 0.31788079470198677, + "acc_stderr,none": 0.03802039760107903 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.3425925925925926, + "acc_stderr,none": 0.032365852526021574 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.4017857142857143, + "acc_stderr,none": 0.04653333146973646 + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4481397945521129, + "acc_stderr,none": 0.01507831322796592, + "alias": "truthfulqa_mc2" + }, + "harness|arc:easy|0": { + "acc,none": 0.7962962962962963, + "acc_stderr,none": 0.008264279630493445, + "acc_norm,none": 0.7798821548821548, + "acc_norm_stderr,none": 0.00850178877471678, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5204778156996587, + "acc_stderr,none": 0.01459913135303501, + "acc_norm,none": 0.5307167235494881, + "acc_norm_stderr,none": 0.014583792546304037, + "alias": "arc_challenge" + }, + "harness|boolq|0": { + "acc,none": 0.8110091743119267, + "acc_stderr,none": 0.006847401355319964, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.404, + "acc_stderr,none": 0.021966635293832915, + "acc_norm,none": 0.486, + "acc_norm_stderr,none": 0.02237429816635319, + "alias": "openbookqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5468034256124278, + "acc_stderr,none": 0.0049678724753832735, + "acc_norm,none": 0.7300338577972515, + "acc_norm_stderr,none": 0.004430346234650381, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.755327545382794, + "acc_stderr,none": 0.012082125654159738, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.31334149326805383, + "acc_stderr,none": 0.016238065069059587, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 6.33977085695839, + "perplexity_stderr,none": 0.17580168952959996, + "acc,none": 0.606636910537551, + "acc_stderr,none": 0.006805707684492764, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "leliuga/phi-2-bnb-4bit", + "revision": "main", + "private": false, + "params": 6.244, + "architectures": "PhiForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 2.7, + "model_size": 1.82, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-29T07:14:10Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bnb_4bit_compute_dtype": "float16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + 
"harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + 
"mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715312464.514766, + "config": { + "model": "hf", + "model_args": "pretrained=leliuga/phi-2-bnb-4bit,trust_remote_code=False,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/leliuga/results_2024-05-10-18-41-39.json b/leliuga/results_2024-05-10-18-41-39.json new file mode 100644 index 0000000000000000000000000000000000000000..7a5194e6aa594269475d579763b73321b42ce986 --- /dev/null +++ b/leliuga/results_2024-05-10-18-41-39.json @@ -0,0 +1,590 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-10-18-41-39", + "total_evaluation_time_secondes": "", + "model_name": "leliuga/Phi-3-mini-128k-instruct-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 2.26, + "model_params": 3.74, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.7292817679558011, + "acc_stderr,none": 0.012487904760626299, + "alias": "winogrande" + }, + "harness|mmlu|0": { + "acc,none": 0.6212078051559606, + "acc_stderr,none": 0.003894192537280643, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5885228480340063, + "acc_stderr,none": 0.006886415266074779 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.5634920634920635, + "acc_stderr,none": 0.04435932892851466 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7636363636363637, + "acc_stderr,none": 0.03317505930009181 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7598039215686274, + "acc_stderr,none": 0.02998373305591361 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7763713080168776, + "acc_stderr,none": 0.027123298205229966 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.8347107438016529, + "acc_stderr,none": 0.03390780612972776 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7129629629629629, + "acc_stderr,none": 0.043733130409147614 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7914110429447853, + "acc_stderr,none": 0.03192193448934724 + 
}, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6473988439306358, + "acc_stderr,none": 0.025722802200895817 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.45363128491620114, + "acc_stderr,none": 0.016650437588269073 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6141479099678456, + "acc_stderr,none": 0.02764814959975147 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7685185185185185, + "acc_stderr,none": 0.023468429832451163 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.46936114732724904, + "acc_stderr,none": 0.012746237711716634 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7953216374269005, + "acc_stderr,none": 0.030944459778533204 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.668490505310589, + "acc_stderr,none": 0.008155964756228174 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.6, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6981132075471698, + "acc_stderr,none": 0.02825420034443866 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5664739884393064, + "acc_stderr,none": 0.037786210790920566 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6233183856502242, + "acc_stderr,none": 0.032521134899291884 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7572815533980582, + "acc_stderr,none": 0.04245022486384495 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8632478632478633, + "acc_stderr,none": 0.022509033937077812 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7994891443167306, + "acc_stderr,none": 0.014317653708594206 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6405228758169934, + "acc_stderr,none": 0.027475969910660952 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4858156028368794, + "acc_stderr,none": 0.02981549448368206 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6176470588235294, + "acc_stderr,none": 0.02952009569768776 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.463855421686747, + "acc_stderr,none": 0.03882310850890594 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7286317842053949, + "acc_stderr,none": 0.00782447594044952 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4649122807017544, + "acc_stderr,none": 0.046920083813689104 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7525252525252525, + "acc_stderr,none": 0.03074630074212451 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8497409326424871, + "acc_stderr,none": 
0.025787723180723886 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6230769230769231, + "acc_stderr,none": 0.024570975364225995 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7058823529411765, + "acc_stderr,none": 0.029597329730978103 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8458715596330275, + "acc_stderr,none": 0.01548082686537429 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7099236641221374, + "acc_stderr,none": 0.03980066246467765 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6503267973856209, + "acc_stderr,none": 0.01929196189506638 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6636363636363637, + "acc_stderr,none": 0.04525393596302506 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7673469387755102, + "acc_stderr,none": 0.02704925791589618 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8407960199004975, + "acc_stderr,none": 0.02587064676616913 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.03775251680686371 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5185537583254044, + "acc_stderr,none": 0.008501678075529097 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5851851851851851, + "acc_stderr,none": 0.04256193767901408 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.743421052631579, + "acc_stderr,none": 0.035541803680256896 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.037455547914624576 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.4, + "acc_stderr,none": 0.04923659639173309 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.3627450980392157, + "acc_stderr,none": 0.047840607041056527 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.67, + "acc_stderr,none": 0.047258156262526094 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.6042553191489362, + "acc_stderr,none": 0.031967586978353627 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.47586206896551725, + "acc_stderr,none": 0.041618085035015295 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4603174603174603, + "acc_stderr,none": 0.02567008063690919 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7612903225806451, + "acc_stderr,none": 0.024251071262208837 + }, + 
"harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5812807881773399, + "acc_stderr,none": 0.03471192860518468 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.64, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3, + "acc_stderr,none": 0.02794045713622842 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3576158940397351, + "acc_stderr,none": 0.03913453431177258 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.48148148148148145, + "acc_stderr,none": 0.034076320938540516 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.48214285714285715, + "acc_stderr,none": 0.047427623612430116 + }, + "harness|hellaswag|0": { + "acc,none": 0.5918143796056562, + "acc_stderr,none": 0.004904933500255886, + "acc_norm,none": 0.7753435570603465, + "acc_norm_stderr,none": 0.004165029164361697, + "alias": "hellaswag" + }, + "harness|piqa|0": { + "acc,none": 0.7829162132752993, + "acc_stderr,none": 0.009618708415756775, + "acc_norm,none": 0.7872687704026116, + "acc_norm_stderr,none": 0.009548223123047376, + "alias": "piqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5202736095699407, + "acc_stderr,none": 0.01533085548161714, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5511945392491467, + "acc_stderr,none": 0.014534599585097665, + "acc_norm,none": 0.5494880546075085, + "acc_norm_stderr,none": 0.014539646098471625, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3537331701346389, + "acc_stderr,none": 0.016737814358846147, + "alias": "truthfulqa_mc1" + }, + "harness|arc:easy|0": { + "acc,none": 0.8160774410774411, + "acc_stderr,none": 0.007949711712533548, + "acc_norm,none": 0.7845117845117845, + "acc_norm_stderr,none": 0.008436837633389652, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.8629969418960245, + "acc_stderr,none": 0.006013984421211777, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.362, + "acc_stderr,none": 0.021513662527582404, + "acc_norm,none": 0.44, + "acc_norm_stderr,none": 0.02222133153414306, + "alias": "openbookqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.763628491475744, + "perplexity_stderr,none": 0.11587026350219728, + "acc,none": 0.6617504366388511, + "acc_stderr,none": 0.006591402212162809, + "alias": "lambada_openai" + } + }, + "task_info": { + "model": "leliuga/Phi-3-mini-128k-instruct-bnb-4bit", + "revision": "main", + "private": false, + "params": 2.26, + "architectures": "Phi3ForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 3.74, + "model_size": 2.26, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-10T07:32:00Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "_load_in_4bit": true, + "_load_in_8bit": false, + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_storage": "uint8", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + 
"llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|hellaswag|0": 1.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|lambada:openai|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, 
+ "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715326653.8249145, + "config": { + "model": "hf", + "model_args": "pretrained=leliuga/Phi-3-mini-128k-instruct-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/leliuga/results_2024-05-13-15-29-15.json b/leliuga/results_2024-05-13-15-29-15.json new file mode 100644 index 0000000000000000000000000000000000000000..caa7a9cb70c13ce43c62c794e7daa53d967d76cf --- /dev/null +++ b/leliuga/results_2024-05-13-15-29-15.json @@ -0,0 +1,586 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-13-15-29-15", + "total_evaluation_time_secondes": "", + "model_name": "leliuga/Llama-2-13b-chat-hf-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 7.2, + "model_params": 13.08, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.6961325966850829, + "acc_stderr,none": 0.012926209475483577, + "alias": "winogrande" + }, + "harness|piqa|0": { + "acc,none": 0.7763873775843307, + "acc_stderr,none": 0.009721489519176294, + "acc_norm,none": 0.7878128400435256, + "acc_norm_stderr,none": 0.009539299828174096, + "alias": "piqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 2.9447294626493585, + "perplexity_stderr,none": 0.0704565389146536, + "acc,none": 0.7321948379584708, + "acc_stderr,none": 0.006169285604118633, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.8024464831804281, + "acc_stderr,none": 0.006963746631628732, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.5222190571143712, + "acc_stderr,none": 
0.00399406528780583, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.48310308182784273, + "acc_stderr,none": 0.0068136684594462425 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2698412698412698, + "acc_stderr,none": 0.03970158273235173 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.0368105086916155 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7450980392156863, + "acc_stderr,none": 0.030587591351604246 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7215189873417721, + "acc_stderr,none": 0.02917868230484255 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7355371900826446, + "acc_stderr,none": 0.04026187527591206 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.04557239513497751 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6932515337423313, + "acc_stderr,none": 0.03623089915724148 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5838150289017341, + "acc_stderr,none": 0.026538189104705484 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.24581005586592178, + "acc_stderr,none": 0.014400296429225627 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6237942122186495, + "acc_stderr,none": 0.027513925683549427 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5925925925925926, + "acc_stderr,none": 0.027339546640662734 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.3859191655801825, + "acc_stderr,none": 0.012433398911476138 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7719298245614035, + "acc_stderr,none": 0.032180937956023566 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5983263598326359, + "acc_stderr,none": 0.00848786302301734 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956912 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.569811320754717, + "acc_stderr,none": 0.030471445867183238 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.4682080924855491, + "acc_stderr,none": 0.03804749744364764 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6143497757847534, + "acc_stderr,none": 0.03266842214289201 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6893203883495146, + "acc_stderr,none": 0.04582124160161551 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7905982905982906, + "acc_stderr,none": 0.026655699653922737 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.59, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 
0.7432950191570882, + "acc_stderr,none": 0.015620480263064533 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6078431372549019, + "acc_stderr,none": 0.027956046165424513 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.41134751773049644, + "acc_stderr,none": 0.02935491115994098 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.4889705882352941, + "acc_stderr,none": 0.030365446477275668 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4457831325301205, + "acc_stderr,none": 0.03869543323472101 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6074098147546312, + "acc_stderr,none": 0.008568371486875524 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.041424397194893624 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6414141414141414, + "acc_stderr,none": 0.0341690364039152 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7357512953367875, + "acc_stderr,none": 0.03182155050916646 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.48205128205128206, + "acc_stderr,none": 0.025334667080954932 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.5084033613445378, + "acc_stderr,none": 0.0324739027656967 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7064220183486238, + "acc_stderr,none": 0.01952515112263966 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6412213740458015, + "acc_stderr,none": 0.04206739313864908 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5441176470588235, + "acc_stderr,none": 0.02014893942041575 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6090909090909091, + "acc_stderr,none": 0.04673752333670238 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6653061224489796, + "acc_stderr,none": 0.030209235226242307 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.746268656716418, + "acc_stderr,none": 0.03076944496729602 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.79, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4224548049476689, + "acc_stderr,none": 0.00855257101480014 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5259259259259259, + "acc_stderr,none": 0.04313531696750575 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5657894736842105, + "acc_stderr,none": 0.04033565667848319 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5486111111111112, + "acc_stderr,none": 0.04161402398403279 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.35, + "acc_stderr,none": 
0.0479372485441102 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.30392156862745096, + "acc_stderr,none": 0.045766654032077636 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.39574468085106385, + "acc_stderr,none": 0.031967586978353627 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.47586206896551725, + "acc_stderr,none": 0.041618085035015295 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.328042328042328, + "acc_stderr,none": 0.024180497164376896 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6193548387096774, + "acc_stderr,none": 0.027621717832907036 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4187192118226601, + "acc_stderr,none": 0.03471192860518468 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.59, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.27037037037037037, + "acc_stderr,none": 0.02708037281514567 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.31788079470198677, + "acc_stderr,none": 0.038020397601079024 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.35185185185185186, + "acc_stderr,none": 0.032568505702936464 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.33035714285714285, + "acc_stderr,none": 0.044642857142857116 + }, + "harness|arc:easy|0": { + "acc,none": 0.7718855218855218, + "acc_stderr,none": 0.008610355160815555, + "acc_norm,none": 0.7441077441077442, + "acc_norm_stderr,none": 0.008953950243013995, + "alias": "arc_easy" + }, + "harness|hellaswag|0": { + "acc,none": 0.6022704640509858, + "acc_stderr,none": 0.004884287515461496, + "acc_norm,none": 0.7938657637920733, + "acc_norm_stderr,none": 0.004037012714039292, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.43023620991960937, + "acc_stderr,none": 0.015553216656357404, + "alias": "truthfulqa_mc2" + }, + "harness|openbookqa|0": { + "acc,none": 0.35, + "acc_stderr,none": 0.021352091786223104, + "acc_norm,none": 0.448, + "acc_norm_stderr,none": 0.02226169729227014, + "alias": "openbookqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4726962457337884, + "acc_stderr,none": 0.014589589101986, + "acc_norm,none": 0.49573378839590443, + "acc_norm_stderr,none": 0.014610858923956952, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2802937576499388, + "acc_stderr,none": 0.015723139524608774, + "alias": "truthfulqa_mc1" + } + }, + "task_info": { + "model": "leliuga/Llama-2-13b-chat-hf-bnb-4bit", + "revision": "main", + "private": false, + "params": 7.2, + "architectures": 
"LlamaForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 13.08, + "model_size": 7.2, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-10T07:47:50Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bnb_4bit_compute_dtype": "float16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|piqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|hellaswag|0": 1.0, + 
"harness|truthfulqa:mc2|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715567404.117856, + "config": { + "model": "hf", + "model_args": "pretrained=leliuga/Llama-2-13b-chat-hf-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/lodrick-the-lafted/results_2024-05-14-16-49-36.json b/lodrick-the-lafted/results_2024-05-14-16-49-36.json new file mode 100644 index 0000000000000000000000000000000000000000..33a88a729e5349c8287dfcc03fac6af1046ad5ff --- /dev/null +++ b/lodrick-the-lafted/results_2024-05-14-16-49-36.json @@ -0,0 +1,583 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-14-16-49-36", + "total_evaluation_time_secondes": "", + "model_name": "lodrick-the-lafted/Olethros-8B-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.73, + "model_params": 7.03, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|lambada:openai|0": { + "perplexity,none": 3.6321724591796505, + "perplexity_stderr,none": 0.08008686223558481, + "acc,none": 
0.701533087521832, + "acc_stderr,none": 0.006375059594075384, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.514505119453925, + "acc_stderr,none": 0.014605241081370056, + "acc_norm,none": 0.5511945392491467, + "acc_norm_stderr,none": 0.014534599585097664, + "alias": "arc_challenge" + }, + "harness|hellaswag|0": { + "acc,none": 0.592212706632145, + "acc_stderr,none": 0.004904189257891258, + "acc_norm,none": 0.7876916948814977, + "acc_norm_stderr,none": 0.0040810615176529695, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.7237569060773481, + "acc_stderr,none": 0.012566815015698158, + "alias": "winogrande" + }, + "harness|arc:easy|0": { + "acc,none": 0.8118686868686869, + "acc_stderr,none": 0.008019395492398136, + "acc_norm,none": 0.79503367003367, + "acc_norm_stderr,none": 0.008283277600626403, + "alias": "arc_easy" + }, + "harness|openbookqa|0": { + "acc,none": 0.34, + "acc_stderr,none": 0.021206117013673066, + "acc_norm,none": 0.446, + "acc_norm_stderr,none": 0.022252153078595897, + "alias": "openbookqa" + }, + "harness|boolq|0": { + "acc,none": 0.8262996941896025, + "acc_stderr,none": 0.006626155556927152, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.7883569096844396, + "acc_stderr,none": 0.009530351270479404, + "acc_norm,none": 0.8030467899891186, + "acc_norm_stderr,none": 0.009278918898006383, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3537331701346389, + "acc_stderr,none": 0.016737814358846147, + "alias": "truthfulqa_mc1" + }, + "harness|mmlu|0": { + "acc,none": 0.6277595784076342, + "acc_stderr,none": 0.0038091124373938728, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5564293304994686, + "acc_stderr,none": 0.006610420717965531 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.48412698412698413, + "acc_stderr,none": 0.04469881854072606 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7272727272727273, + "acc_stderr,none": 0.03477691162163659 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8382352941176471, + "acc_stderr,none": 0.025845017986926913 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8354430379746836, + "acc_stderr,none": 0.024135736240566946 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.768595041322314, + "acc_stderr,none": 0.038498560987940904 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.0401910747255735 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7607361963190185, + "acc_stderr,none": 0.033519538795212696 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.7196531791907514, + "acc_stderr,none": 0.024182427496577605 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.24022346368715083, + "acc_stderr,none": 0.014288343803925308 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.7009646302250804, + "acc_stderr,none": 0.02600330111788514 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7253086419753086, + "acc_stderr,none": 0.024836057868294677 + }, + 
"harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4634941329856584, + "acc_stderr,none": 0.012736153390214963 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8128654970760234, + "acc_stderr,none": 0.029913127232368036 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.7161248793047956, + "acc_stderr,none": 0.007806351197124666 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621505 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7132075471698113, + "acc_stderr,none": 0.027834912527544064 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6069364161849711, + "acc_stderr,none": 0.0372424959581773 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.7085201793721974, + "acc_stderr,none": 0.03050028317654584 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8252427184466019, + "acc_stderr,none": 0.037601780060266196 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8803418803418803, + "acc_stderr,none": 0.021262719400406957 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.81, + "acc_stderr,none": 0.039427724440366206 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.8173690932311622, + "acc_stderr,none": 0.013816335389973119 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7320261437908496, + "acc_stderr,none": 0.025360603796242564 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5177304964539007, + "acc_stderr,none": 0.02980873964223777 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7132352941176471, + "acc_stderr,none": 0.02747227447323382 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5481927710843374, + "acc_stderr,none": 0.03874371556587953 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7367565810854728, + "acc_stderr,none": 0.0077769085837894 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4824561403508772, + "acc_stderr,none": 0.04700708033551038 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.8282828282828283, + "acc_stderr,none": 0.02686971618742992 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8549222797927462, + "acc_stderr,none": 0.025416343096306457 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6435897435897436, + "acc_stderr,none": 0.0242831405294673 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7100840336134454, + "acc_stderr,none": 0.029472485833136074 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8293577981651377, + "acc_stderr,none": 0.01612927102509989 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - 
human_sexuality", + "acc,none": 0.7404580152671756, + "acc_stderr,none": 0.03844876139785271 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6650326797385621, + "acc_stderr,none": 0.019094228167000318 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6909090909090909, + "acc_stderr,none": 0.044262946482000985 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.7306122448979592, + "acc_stderr,none": 0.02840125202902294 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.835820895522388, + "acc_stderr,none": 0.02619392354445413 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.84, + "acc_stderr,none": 0.03684529491774708 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5407548366634951, + "acc_stderr,none": 0.008584889013687409 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.35, + "acc_stderr,none": 0.0479372485441102 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6148148148148148, + "acc_stderr,none": 0.042039210401562783 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6776315789473685, + "acc_stderr,none": 0.03803510248351585 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7569444444444444, + "acc_stderr,none": 0.03586879280080341 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.42, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695236 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4215686274509804, + "acc_stderr,none": 0.04913595201274498 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.77, + "acc_stderr,none": 0.04229525846816505 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5319148936170213, + "acc_stderr,none": 0.03261936918467382 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6, + "acc_stderr,none": 0.04082482904638629 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4656084656084656, + "acc_stderr,none": 0.025690321762493848 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7645161290322581, + "acc_stderr,none": 0.024137632429337703 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5024630541871922, + "acc_stderr,none": 0.03517945038691063 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3925925925925926, + "acc_stderr,none": 0.02977384701253297 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.45695364238410596, + "acc_stderr,none": 
0.04067325174247443 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.49074074074074076, + "acc_stderr,none": 0.034093869469927006 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.49107142857142855, + "acc_stderr,none": 0.04745033255489123 + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5153921756698849, + "acc_stderr,none": 0.01493641287467341, + "alias": "truthfulqa_mc2" + } + }, + "task_info": { + "model": "lodrick-the-lafted/Olethros-8B-AWQ", + "revision": "main", + "private": false, + "params": 5.73, + "architectures": "LlamaForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 7.03, + "model_size": 5.73, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Waiting", + "submitted_time": "2024-05-11T18:47:40Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "modules_to_not_convert": null, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 
0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|truthfulqa:mc2|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715652563.2538033, + "config": { + "model": "hf", + "model_args": "pretrained=lodrick-the-lafted/Olethros-8B-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/maddes8cht/results_2024-05-09-20-40-54.json b/maddes8cht/results_2024-05-09-20-40-54.json new file mode 100644 index 0000000000000000000000000000000000000000..a4c9ec7a28d0c969d3c0203c95e93b9b7bd67373 --- /dev/null +++ b/maddes8cht/results_2024-05-09-20-40-54.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + 
"end_time": "2024-05-09-20-40-54", + "total_evaluation_time_secondes": "", + "model_name": "maddes8cht/bofenghuang-vigogne-falcon-7b-chat-gguf", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.208434688, + "model_params": 7.21718976, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|hellaswag|0": { + "acc,none": 0.5779725154351723, + "acc_stderr,none": 0.004928735103635844, + "acc_norm,none": 0.7692690699063931, + "acc_norm_stderr,none": 0.004204395478506549, + "alias": "hellaswag" + }, + "harness|winogrande|0": { + "acc,none": 0.7024467245461721, + "acc_stderr,none": 0.012849085254614647, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2864137086903305, + "acc_stderr,none": 0.015826142439502366, + "alias": "truthfulqa_mc1" + }, + "harness|arc:easy|0": { + "acc,none": 0.7348484848484849, + "acc_stderr,none": 0.009057621139172616, + "acc_norm,none": 0.6982323232323232, + "acc_norm_stderr,none": 0.00941899415852253, + "alias": "arc_easy" + }, + "harness|boolq|0": { + "acc,none": 0.736697247706422, + "acc_stderr,none": 0.007703086288558767, + "alias": "boolq" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.10482325152235, + "perplexity_stderr,none": 0.0627804346559759, + "acc,none": 0.7137589753541627, + "acc_stderr,none": 0.006297291584451062, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4138225255972696, + "acc_stderr,none": 0.014392730009221014, + "acc_norm,none": 0.44368600682593856, + "acc_norm_stderr,none": 0.014518421825670445, + "alias": "arc_challenge" + }, + "harness|mmlu|0": { + "acc,none": 0.24170346104543514, + "acc_stderr,none": 0.0036029816287874716, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.2505844845908608, + "acc_stderr,none": 0.006304608379559278 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.30952380952380953, + "acc_stderr,none": 0.04134913018303317 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.21818181818181817, + "acc_stderr,none": 0.03225078108306289 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.28431372549019607, + "acc_stderr,none": 0.03166009679399813 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.29535864978902954, + "acc_stderr,none": 0.02969633871342289 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.2231404958677686, + "acc_stderr,none": 0.03800754475228732 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.28703703703703703, + "acc_stderr,none": 0.043733130409147614 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.2392638036809816, + "acc_stderr,none": 0.03351953879521269 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.2861271676300578, + "acc_stderr,none": 0.02433214677913413 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23910614525139665, + "acc_stderr,none": 0.014265554192331154 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.1864951768488746, + "acc_stderr,none": 0.022122439772480764 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.22530864197530864, + 
"acc_stderr,none": 0.02324620264781975 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.242503259452412, + "acc_stderr,none": 0.010946570966348766 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.3684210526315789, + "acc_stderr,none": 0.036996580176568775 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.2565175410363695, + "acc_stderr,none": 0.007800700651177248 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.2188679245283019, + "acc_stderr,none": 0.025447863825108604 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.21965317919075145, + "acc_stderr,none": 0.031568093627031744 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.16, + "acc_stderr,none": 0.036845294917747094 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.37668161434977576, + "acc_stderr,none": 0.03252113489929188 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.20388349514563106, + "acc_stderr,none": 0.03989139859531771 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.29914529914529914, + "acc_stderr,none": 0.02999695185834948 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.27330779054916987, + "acc_stderr,none": 0.01593668106262856 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.24836601307189543, + "acc_stderr,none": 0.02473998135511359 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.22695035460992907, + "acc_stderr,none": 0.024987106365642973 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.19117647058823528, + "acc_stderr,none": 0.02388688192244036 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.26506024096385544, + "acc_stderr,none": 0.03436024037944967 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.23431914202144946, + "acc_stderr,none": 0.00762480998468139 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2543859649122807, + "acc_stderr,none": 0.04096985139843669 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.20707070707070707, + "acc_stderr,none": 0.02886977846026706 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.18652849740932642, + "acc_stderr,none": 0.02811209121011749 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.21025641025641026, + "acc_stderr,none": 0.020660597485026928 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.22268907563025211, + "acc_stderr,none": 0.02702543349888238 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.20733944954128442, + "acc_stderr,none": 0.017381415563608667 + 
}, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.29770992366412213, + "acc_stderr,none": 0.04010358942462203 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.2565359477124183, + "acc_stderr,none": 0.017667841612378967 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.3090909090909091, + "acc_stderr,none": 0.044262946482000985 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.21224489795918366, + "acc_stderr,none": 0.026176967197866767 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.263681592039801, + "acc_stderr,none": 0.03115715086935557 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.2210593085949889, + "acc_stderr,none": 0.007382955233076822 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.21, + "acc_stderr,none": 0.04093601807403326 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.2074074074074074, + "acc_stderr,none": 0.03502553170678318 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.21052631578947367, + "acc_stderr,none": 0.03317672787533158 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2847222222222222, + "acc_stderr,none": 0.03773809990686935 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.21, + "acc_stderr,none": 0.04093601807403325 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.22, + "acc_stderr,none": 0.0416333199893227 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.21568627450980393, + "acc_stderr,none": 0.04092563958237657 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847394 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.24680851063829787, + "acc_stderr,none": 0.028185441301234116 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.22758620689655173, + "acc_stderr,none": 0.03493950380131183 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.19576719576719576, + "acc_stderr,none": 0.020435730971541805 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.2064516129032258, + "acc_stderr,none": 0.02302589961718872 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.1625615763546798, + "acc_stderr,none": 0.02596030006460557 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2111111111111111, + "acc_stderr,none": 0.024882116857655092 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - 
high_school_physics", + "acc,none": 0.2052980132450331, + "acc_stderr,none": 0.032979866484738336 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.18518518518518517, + "acc_stderr,none": 0.026491914727355154 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.30357142857142855, + "acc_stderr,none": 0.04364226155841044 + }, + "harness|piqa|0": { + "acc,none": 0.8014145810663765, + "acc_stderr,none": 0.009307814521717871, + "acc_norm,none": 0.8014145810663765, + "acc_norm_stderr,none": 0.009307814521717904, + "alias": "piqa" + }, + "harness|openbookqa|0": { + "acc,none": 0.33, + "acc_stderr,none": 0.021049612166134803, + "acc_norm,none": 0.454, + "acc_norm_stderr,none": 0.02228814759117695, + "alias": "openbookqa" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4312124468998062, + "acc_stderr,none": 0.01481217597513169, + "alias": "truthfulqa_mc2" + } + }, + "task_info": { + "model": "maddes8cht/bofenghuang-vigogne-falcon-7b-chat-gguf", + "revision": "main", + "private": false, + "params": 28.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 56.0, + "model_size": 28.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-08T01:22:19Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|hellaswag|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|boolq|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + 
"harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|piqa|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715218895.5899148, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=maddes8cht/bofenghuang-vigogne-falcon-7b-chat-gguf,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/mlabonne/results_2024-05-19-22-39-56.json 
b/mlabonne/results_2024-05-19-22-39-56.json new file mode 100644 index 0000000000000000000000000000000000000000..c476dfec6580cb55eac067d182387a2e3b8945da --- /dev/null +++ b/mlabonne/results_2024-05-19-22-39-56.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-19-22-39-56", + "total_evaluation_time_secondes": "", + "model_name": "mlabonne/gemma-7b-it-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.650919424, + "model_params": 9.324112896, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|winogrande|0": { + "acc,none": 0.6708760852407262, + "acc_stderr,none": 0.013206387089091472, + "alias": "winogrande" + }, + "harness|piqa|0": { + "acc,none": 0.7747551686615887, + "acc_stderr,none": 0.00974664347103215, + "acc_norm,none": 0.7736670293797606, + "acc_norm_stderr,none": 0.009763294246879424, + "alias": "piqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.5575582553276239, + "acc_stderr,none": 0.004956609327218413, + "acc_norm,none": 0.7247560246962756, + "acc_norm_stderr,none": 0.004457243336616546, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4723133865029962, + "acc_stderr,none": 0.016394421729194586, + "alias": "truthfulqa_mc2" + }, + "harness|mmlu|0": { + "acc,none": 0.5007833641931349, + "acc_stderr,none": 0.004037272554499701, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.44718384697130714, + "acc_stderr,none": 0.0069216159700638885 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.373015873015873, + "acc_stderr,none": 0.04325506042017086 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6060606060606061, + "acc_stderr,none": 0.0381549430868893 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.5882352941176471, + "acc_stderr,none": 0.0345423658538061 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.70042194092827, + "acc_stderr,none": 0.029818024749753095 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.6446280991735537, + "acc_stderr,none": 0.0436923632657398 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6296296296296297, + "acc_stderr,none": 0.0466840803302493 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.5766871165644172, + "acc_stderr,none": 0.03881891213334383 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.5606936416184971, + "acc_stderr,none": 0.026720034380514988 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23016759776536314, + "acc_stderr,none": 0.014078339253425814 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5755627009646302, + "acc_stderr,none": 0.028071928247946208 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5401234567901234, + "acc_stderr,none": 0.027731022753539284 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.36962190352020863, + "acc_stderr,none": 0.012328445778575246 + }, 
+ "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.6432748538011696, + "acc_stderr,none": 0.03674013002860954 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5806243965239781, + "acc_stderr,none": 0.008544482287190786 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.54, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5320754716981132, + "acc_stderr,none": 0.030709486992556552 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.4161849710982659, + "acc_stderr,none": 0.03758517775404947 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6412556053811659, + "acc_stderr,none": 0.03219079200419996 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.6699029126213593, + "acc_stderr,none": 0.046561471100123514 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8162393162393162, + "acc_stderr,none": 0.025372139671722933 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.62, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.70242656449553, + "acc_stderr,none": 0.016349111912909425 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5849673202614379, + "acc_stderr,none": 0.028213504177824096 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.36879432624113473, + "acc_stderr,none": 0.028782227561347254 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.45955882352941174, + "acc_stderr,none": 0.03027332507734575 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4397590361445783, + "acc_stderr,none": 0.03864139923699122 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.579785505362366, + "acc_stderr,none": 0.008681697144433616 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.3157894736842105, + "acc_stderr,none": 0.04372748290278007 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6616161616161617, + "acc_stderr,none": 0.033711241426263035 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.6994818652849741, + "acc_stderr,none": 0.03308818594415749 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.47435897435897434, + "acc_stderr,none": 0.025317649726448666 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.5126050420168067, + "acc_stderr,none": 0.03246816765752174 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.728440366972477, + "acc_stderr,none": 0.019069098363191442 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6183206106870229, + "acc_stderr,none": 0.042607351576445594 + }, + "harness|mmlu_professional_psychology|0": { + 
"alias": " - professional_psychology", + "acc,none": 0.4918300653594771, + "acc_stderr,none": 0.020225134343057272 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6545454545454545, + "acc_stderr,none": 0.04554619617541054 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.5061224489795918, + "acc_stderr,none": 0.032006820201639086 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.6417910447761194, + "acc_stderr,none": 0.03390393042268814 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.71, + "acc_stderr,none": 0.045604802157206845 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.42499207104345066, + "acc_stderr,none": 0.008598210668566867 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.45185185185185184, + "acc_stderr,none": 0.04299268905480864 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.48026315789473684, + "acc_stderr,none": 0.040657710025626036 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5694444444444444, + "acc_stderr,none": 0.04140685639111503 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.2549019607843137, + "acc_stderr,none": 0.04336432707993177 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.7, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.4851063829787234, + "acc_stderr,none": 0.032671518489247764 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.4827586206896552, + "acc_stderr,none": 0.04164188720169377 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.3439153439153439, + "acc_stderr,none": 0.024464426625596437 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.5935483870967742, + "acc_stderr,none": 0.027941727346256304 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.41379310344827586, + "acc_stderr,none": 0.03465304488406796 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.52, + "acc_stderr,none": 0.05021167315686779 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3037037037037037, + "acc_stderr,none": 0.02803792996911499 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.33774834437086093, + "acc_stderr,none": 0.038615575462551684 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 
0.28703703703703703, + "acc_stderr,none": 0.03085199299325701 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.45535714285714285, + "acc_stderr,none": 0.04726835553719099 + }, + "harness|openbookqa|0": { + "acc,none": 0.376, + "acc_stderr,none": 0.021683827539286115, + "acc_norm,none": 0.452, + "acc_norm_stderr,none": 0.02227969410784342, + "alias": "openbookqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.7571548821548821, + "acc_stderr,none": 0.008798836444222012, + "acc_norm,none": 0.734006734006734, + "acc_norm_stderr,none": 0.009066789565615694, + "alias": "arc_easy" + }, + "harness|lambada:openai|0": { + "perplexity,none": 10.16533173500903, + "perplexity_stderr,none": 0.5211498221559918, + "acc,none": 0.10712206481661168, + "acc_stderr,none": 0.004308713186753709, + "alias": "lambada_openai" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4684300341296928, + "acc_stderr,none": 0.014582236460866977, + "acc_norm,none": 0.48464163822525597, + "acc_norm_stderr,none": 0.014604496129394904, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2998776009791922, + "acc_stderr,none": 0.016040352966713634, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.8110091743119267, + "acc_stderr,none": 0.0068474013553199604, + "alias": "boolq" + } + }, + "task_info": { + "model": "mlabonne/gemma-7b-it-GGUF", + "revision": "main", + "private": false, + "params": 7.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 7.0, + "model_size": 5.01, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-04-29T05:16:42Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|winogrande|0": 1.0, + "harness|piqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 
0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|openbookqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715999584.8196464, + "config": { + "model": "WrapperGGUFLM", + "model_args": 
"gguf_model=mlabonne/gemma-7b-it-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/noxinc/results_2024-05-22-01-00-12.json b/noxinc/results_2024-05-22-01-00-12.json new file mode 100644 index 0000000000000000000000000000000000000000..e50ca713f15d26ee76abea7cdaa365df6fe2e4ef --- /dev/null +++ b/noxinc/results_2024-05-22-01-00-12.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-22-01-00-12", + "total_evaluation_time_secondes": "", + "model_name": "noxinc/phi-3-portuguese-tom-cat-4k-instruct-Q4_0-GGUF-PTBR", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 2.175438336, + "model_params": 3.821079552, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|arc:challenge|0": { + "acc,none": 0.53839590443686, + "acc_stderr,none": 0.014568245550296361, + "acc_norm,none": 0.560580204778157, + "acc_norm_stderr,none": 0.014503747823580125, + "alias": "arc_challenge" + }, + "harness|mmlu|0": { + "acc,none": 0.6323173337131462, + "acc_stderr,none": 0.0038812703429438895, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5861849096705632, + "acc_stderr,none": 0.006909898897532406 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4523809523809524, + "acc_stderr,none": 0.044518079590553275 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7454545454545455, + "acc_stderr,none": 0.03401506715249039 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7254901960784313, + "acc_stderr,none": 0.031321798030832904 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7088607594936709, + "acc_stderr,none": 0.029571601065753378 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7933884297520661, + "acc_stderr,none": 0.036959801280988254 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7314814814814815, + "acc_stderr,none": 0.042844679680521934 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7607361963190185, + "acc_stderr,none": 0.033519538795212696 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6878612716763006, + "acc_stderr,none": 0.024946792225272314 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.4581005586592179, + "acc_stderr,none": 0.016663683295020527 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.684887459807074, + "acc_stderr,none": 0.026385273703464492 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7561728395061729, + "acc_stderr,none": 0.023891879541959603 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4680573663624511, + "acc_stderr,none": 0.012744149704869647 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8128654970760234, + 
"acc_stderr,none": 0.029913127232368022 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6923076923076923, + "acc_stderr,none": 0.00802239670550724 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.72, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7320754716981132, + "acc_stderr,none": 0.027257260322494845 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6184971098265896, + "acc_stderr,none": 0.03703851193099521 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6502242152466368, + "acc_stderr,none": 0.03200736719484503 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8446601941747572, + "acc_stderr,none": 0.03586594738573974 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8760683760683761, + "acc_stderr,none": 0.021586494001281403 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695238 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7905491698595147, + "acc_stderr,none": 0.014551310568143704 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6601307189542484, + "acc_stderr,none": 0.02712195607138886 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5390070921985816, + "acc_stderr,none": 0.029736592526424438 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6691176470588235, + "acc_stderr,none": 0.028582709753898438 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333047 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7312317192070198, + "acc_stderr,none": 0.007822932557118559 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.49122807017543857, + "acc_stderr,none": 0.04702880432049615 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7929292929292929, + "acc_stderr,none": 0.028869778460267063 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8497409326424871, + "acc_stderr,none": 0.025787723180723872 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.7051282051282052, + "acc_stderr,none": 0.02311936275823229 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7563025210084033, + "acc_stderr,none": 0.027886828078380558 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8366972477064221, + "acc_stderr,none": 0.01584825580650153 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6641221374045801, + "acc_stderr,none": 0.04142313771996665 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.019070985589687492 
+ }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6727272727272727, + "acc_stderr,none": 0.044942908662520896 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.5918367346938775, + "acc_stderr,none": 0.03146465712827423 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8059701492537313, + "acc_stderr,none": 0.027962677604768893 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.86, + "acc_stderr,none": 0.0348735088019777 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.545512210593086, + "acc_stderr,none": 0.008466724298098851 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.38, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.04072314811876837 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6776315789473685, + "acc_stderr,none": 0.03803510248351585 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7638888888888888, + "acc_stderr,none": 0.03551446610810826 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.44, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.04688261722621504 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.35294117647058826, + "acc_stderr,none": 0.047551296160629475 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.8, + "acc_stderr,none": 0.04020151261036845 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.6212765957446809, + "acc_stderr,none": 0.03170995606040655 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5586206896551724, + "acc_stderr,none": 0.04137931034482757 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.46825396825396826, + "acc_stderr,none": 0.025699352832131792 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.8129032258064516, + "acc_stderr,none": 0.02218571009225225 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.5221674876847291, + "acc_stderr,none": 0.03514528562175008 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.6, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.34444444444444444, + "acc_stderr,none": 0.028972648884844267 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.423841059602649, + "acc_stderr,none": 0.04034846678603397 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5138888888888888, + "acc_stderr,none": 0.03408655867977749 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - 
machine_learning", + "acc,none": 0.5357142857142857, + "acc_stderr,none": 0.04733667890053756 + }, + "harness|winogrande|0": { + "acc,none": 0.7024467245461721, + "acc_stderr,none": 0.012849085254614647, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.379436964504284, + "acc_stderr,none": 0.016987039266142975, + "alias": "truthfulqa_mc1" + }, + "harness|boolq|0": { + "acc,none": 0.8492354740061162, + "acc_stderr,none": 0.00625829724426619, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.794885745375408, + "acc_stderr,none": 0.009420971671017913, + "acc_norm,none": 0.794341675734494, + "acc_norm_stderr,none": 0.009430229076102503, + "alias": "piqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.8063973063973064, + "acc_stderr,none": 0.008107714081954553, + "acc_norm,none": 0.7929292929292929, + "acc_norm_stderr,none": 0.008314665023956568, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5544066746600581, + "acc_stderr,none": 0.015175889172914337, + "alias": "truthfulqa_mc2" + }, + "harness|lambada:openai|0": { + "perplexity,none": 5.88455992880931, + "perplexity_stderr,none": 0.15247629498325097, + "acc,none": 0.3564913642538327, + "acc_stderr,none": 0.006672886984196191, + "alias": "lambada_openai" + }, + "harness|hellaswag|0": { + "acc,none": 0.5780720971917944, + "acc_stderr,none": 0.004928578106026359, + "acc_norm,none": 0.7599083847839075, + "acc_norm_stderr,none": 0.004262659388824527, + "alias": "hellaswag" + }, + "harness|openbookqa|0": { + "acc,none": 0.342, + "acc_stderr,none": 0.021236147199899254, + "acc_norm,none": 0.436, + "acc_norm_stderr,none": 0.0221989546414768, + "alias": "openbookqa" + } + }, + "task_info": { + "model": "noxinc/phi-3-portuguese-tom-cat-4k-instruct-Q4_0-GGUF-PTBR", + "revision": "main", + "private": false, + "params": null, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": null, + "model_size": null, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-20T04:01:34Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|arc:challenge|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + 
"harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|openbookqa|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 
0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1716293460.9694755, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=noxinc/phi-3-portuguese-tom-cat-4k-instruct-Q4_0-GGUF-PTBR,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/second-state/results_2024-05-09-01-39-12.json b/second-state/results_2024-05-09-01-39-12.json new file mode 100644 index 0000000000000000000000000000000000000000..14ef1e086fea6359328285e44a01df8f306101f4 --- /dev/null +++ b/second-state/results_2024-05-09-01-39-12.json @@ -0,0 +1,579 @@ +{ + "config_general": { + "lighteval_sha": "no", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-09-01-39-12", + "total_evaluation_time_secondes": "", + "model_name": "second-state/Baichuan2-7B-Chat-GGUF", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.355760128, + "model_params": 7.505973248, + "quant_type": "llama.cpp", + "precision": "4bit" + }, + "results": { + "harness|arc:easy|0": { + "acc,none": 0.3808922558922559, + "acc_stderr,none": 0.009964428212260387, + "acc_norm,none": 0.3930976430976431, + "acc_norm_stderr,none": 0.010022540618945324, + "alias": "arc_easy" + }, + "harness|hellaswag|0": { + "acc,none": 0.4235212109141605, + "acc_stderr,none": 0.004931065434173693, + "acc_norm,none": 0.5505875323640709, + "acc_norm_stderr,none": 0.004964177035221407, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5034393339017434, + "acc_stderr,none": 0.016074554662135965, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.3046075085324232, + "acc_stderr,none": 0.01344952210993249, + "acc_norm,none": 0.3532423208191126, + "acc_norm_stderr,none": 0.013967822714840055, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3072215422276622, + "acc_stderr,none": 0.01615020132132302, + "alias": "truthfulqa_mc1" + }, + "harness|lambada:openai|0": { + "perplexity,none": 1.0544404776888845, + "perplexity_stderr,none": 0.014876020811968618, + "acc,none": 0.8715311469047157, + "acc_stderr,none": 0.004661787280872993, + "alias": "lambada_openai" + }, + "harness|mmlu|0": { + "acc,none": 0.22945449366187154, + "acc_stderr,none": 0.0035426239458926224, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.24208289054197663, + "acc_stderr,none": 0.0062426684031394305 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2857142857142857, + "acc_stderr,none": 0.04040610178208841 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.21818181818181817, + "acc_stderr,none": 0.03225078108306289 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.25, + "acc_stderr,none": 0.03039153369274154 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 
0.270042194092827, + "acc_stderr,none": 0.028900721906293426 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.2396694214876033, + "acc_stderr,none": 0.03896878985070417 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.25925925925925924, + "acc_stderr,none": 0.04236511258094634 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.22085889570552147, + "acc_stderr,none": 0.032591773927421776 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.24855491329479767, + "acc_stderr,none": 0.023267528432100174 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574885 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.1864951768488746, + "acc_stderr,none": 0.02212243977248077 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.21604938271604937, + "acc_stderr,none": 0.022899162918445813 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.2457627118644068, + "acc_stderr,none": 0.01099615663514269 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.3216374269005848, + "acc_stderr,none": 0.03582529442573122 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.23978113936272932, + "acc_stderr,none": 0.00764225029165751 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.21509433962264152, + "acc_stderr,none": 0.025288394502891377 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.20809248554913296, + "acc_stderr,none": 0.030952890217749884 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.18, + "acc_stderr,none": 0.038612291966536955 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.31390134529147984, + "acc_stderr,none": 0.03114679648297246 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.17475728155339806, + "acc_stderr,none": 0.03760178006026621 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.2905982905982906, + "acc_stderr,none": 0.029745048572674057 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.23754789272030652, + "acc_stderr,none": 0.015218733046150195 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.22549019607843138, + "acc_stderr,none": 0.023929155517351284 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.23404255319148937, + "acc_stderr,none": 0.025257861359432407 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.18382352941176472, + "acc_stderr,none": 0.02352924218519311 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.28313253012048195, + "acc_stderr,none": 0.03507295431370518 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 
0.2170945726356841, + "acc_stderr,none": 0.007428786285788534 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.23684210526315788, + "acc_stderr,none": 0.039994238792813386 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.17676767676767677, + "acc_stderr,none": 0.027178752639044915 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.19689119170984457, + "acc_stderr,none": 0.02869787397186069 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.20256410256410257, + "acc_stderr,none": 0.020377660970371397 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.21008403361344538, + "acc_stderr,none": 0.026461398717471874 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.1926605504587156, + "acc_stderr,none": 0.016909276884936073 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.2595419847328244, + "acc_stderr,none": 0.03844876139785271 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.25, + "acc_stderr,none": 0.01751781884501444 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.21818181818181817, + "acc_stderr,none": 0.03955932861795833 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.18775510204081633, + "acc_stderr,none": 0.02500025603954622 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.24378109452736318, + "acc_stderr,none": 0.030360490154014652 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.21249603552172533, + "acc_stderr,none": 0.007271218700485502 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.22, + "acc_stderr,none": 0.04163331998932269 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.18518518518518517, + "acc_stderr,none": 0.03355677216313142 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.17763157894736842, + "acc_stderr,none": 0.031103182383123398 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.2569444444444444, + "acc_stderr,none": 0.03653946969442099 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.2, + "acc_stderr,none": 0.040201512610368445 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.26, + "acc_stderr,none": 0.044084400227680794 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.21, + "acc_stderr,none": 0.040936018074033256 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.21568627450980393, + "acc_stderr,none": 0.040925639582376556 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.28, + "acc_stderr,none": 0.045126085985421276 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.26382978723404255, + "acc_stderr,none": 
0.02880998985410298 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.2413793103448276, + "acc_stderr,none": 0.03565998174135302 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.20899470899470898, + "acc_stderr,none": 0.020940481565334835 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.1774193548387097, + "acc_stderr,none": 0.021732540689329265 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.15270935960591134, + "acc_stderr,none": 0.025308904539380624 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2111111111111111, + "acc_stderr,none": 0.02488211685765508 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.1986754966887417, + "acc_stderr,none": 0.032578473844367746 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.1527777777777778, + "acc_stderr,none": 0.02453632602613422 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.3125, + "acc_stderr,none": 0.043994650575715215 + }, + "harness|piqa|0": { + "acc,none": 0.6887921653971708, + "acc_stderr,none": 0.010802263878045842, + "acc_norm,none": 0.691512513601741, + "acc_norm_stderr,none": 0.010776164678037155, + "alias": "piqa" + }, + "harness|boolq|0": { + "acc,none": 0.39174311926605504, + "acc_stderr,none": 0.00853761847747861, + "alias": "boolq" + }, + "harness|openbookqa|0": { + "acc,none": 0.256, + "acc_stderr,none": 0.019536923574747605, + "acc_norm,none": 0.28, + "acc_norm_stderr,none": 0.020099950647503237, + "alias": "openbookqa" + }, + "harness|winogrande|0": { + "acc,none": 0.6274664561957379, + "acc_stderr,none": 0.013588173888522449, + "alias": "winogrande" + } + }, + "task_info": { + "model": "second-state/Baichuan2-7B-Chat-GGUF", + "revision": "main", + "private": false, + "params": 28.0, + "architectures": "?", + "quant_type": "llama.cpp", + "precision": "4bit", + "model_params": 56.0, + "model_size": 28.0, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "cpu", + "status": "Pending", + "submitted_time": "2024-05-07T04:03:11Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "llama_cpp" + }, + "quantization_config": { + "quant_method": "llama.cpp", + "ftype": "*Q4_0.gguf" + }, + "versions": { + "harness|arc:easy|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + 
"harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|piqa|0": 1.0, + "harness|boolq|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|winogrande|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + 
"mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715117188.1310027, + "config": { + "model": "WrapperGGUFLM", + "model_args": "gguf_model=second-state/Baichuan2-7B-Chat-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main", + "batch_size": 1, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/solidrust/results_2024-04-28-11-43-27.json b/solidrust/results_2024-04-28-11-43-27.json new file mode 100644 index 0000000000000000000000000000000000000000..b747793f05915cbc0fa4a835db7239836eb87f6f --- /dev/null +++ b/solidrust/results_2024-04-28-11-43-27.json @@ -0,0 +1,583 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-28-11-43-27", + "total_evaluation_time_secondes": "", + "model_name": "solidrust/Meta-Llama-3-8B-Instruct-hf-AWQ", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.73, + "model_params": 7.03, + "quant_type": "AWQ", + "precision": "4bit" + }, + "results": { + "harness|lambada:openai|0": { + "perplexity,none": 3.1935073590428087, + "perplexity_stderr,none": 0.07777440049727834, + "acc,none": 0.720551135261013, + "acc_stderr,none": 0.006251664323978085, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.828440366972477, + "acc_stderr,none": 0.0065937233273874515, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.6159379005839624, + "acc_stderr,none": 0.003891283978010192, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5636556854410202, + "acc_stderr,none": 0.006801499050906265 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.4523809523809524, + "acc_stderr,none": 0.044518079590553275 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7393939393939394, + "acc_stderr,none": 0.034277431758165236 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.8284313725490197, + "acc_stderr,none": 0.026460569561240658 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8059071729957806, + "acc_stderr,none": 0.025744902532290934 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7355371900826446, + "acc_stderr,none": 0.04026187527591206 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7592592592592593, + "acc_stderr,none": 0.041331194402438376 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + 
"acc,none": 0.7668711656441718, + "acc_stderr,none": 0.0332201579577674 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6647398843930635, + "acc_stderr,none": 0.02541600377316555 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.3396648044692737, + "acc_stderr,none": 0.015839400406212505 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6977491961414791, + "acc_stderr,none": 0.02608270069539966 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.02492200116888633 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.45697522816166886, + "acc_stderr,none": 0.012722869501611419 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7660818713450293, + "acc_stderr,none": 0.03246721765117826 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6910202767943354, + "acc_stderr,none": 0.007999286870057722 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.64, + "acc_stderr,none": 0.048241815132442176 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6981132075471698, + "acc_stderr,none": 0.02825420034443866 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6011560693641619, + "acc_stderr,none": 0.037336266553835096 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.37, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6771300448430493, + "acc_stderr,none": 0.03138147637575499 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8155339805825242, + "acc_stderr,none": 0.03840423627288276 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8547008547008547, + "acc_stderr,none": 0.02308663508684141 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.81, + "acc_stderr,none": 0.03942772444036623 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7982120051085568, + "acc_stderr,none": 0.01435170218163687 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6830065359477124, + "acc_stderr,none": 0.02664327847450875 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.49645390070921985, + "acc_stderr,none": 0.02982674915328092 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.6875, + "acc_stderr,none": 0.02815637344037142 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4819277108433735, + "acc_stderr,none": 0.03889951252827215 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7149821254468638, + "acc_stderr,none": 0.007978473473048482 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.4298245614035088, + "acc_stderr,none": 0.04657047260594963 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7676767676767676, + "acc_stderr,none": 0.03008862949021749 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + 
"acc,none": 0.8601036269430051, + "acc_stderr,none": 0.025033870583015167 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6333333333333333, + "acc_stderr,none": 0.024433016466052455 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6596638655462185, + "acc_stderr,none": 0.03077805742293167 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7981651376146789, + "acc_stderr,none": 0.017208579357787572 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7480916030534351, + "acc_stderr,none": 0.03807387116306086 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6699346405228758, + "acc_stderr,none": 0.019023726160724553 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6272727272727273, + "acc_stderr,none": 0.04631381319425464 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6938775510204082, + "acc_stderr,none": 0.029504896454595968 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8258706467661692, + "acc_stderr,none": 0.026814951200421603 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.81, + "acc_stderr,none": 0.03942772444036624 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5233111322549953, + "acc_stderr,none": 0.008634698284601918 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.35, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.562962962962963, + "acc_stderr,none": 0.042849586397534 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6644736842105263, + "acc_stderr,none": 0.038424985593952694 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.03745554791462457 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.47, + "acc_stderr,none": 0.05016135580465919 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956912 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4411764705882353, + "acc_stderr,none": 0.049406356306056595 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.72, + "acc_stderr,none": 0.04512608598542129 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5531914893617021, + "acc_stderr,none": 0.0325005368436584 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5793103448275863, + "acc_stderr,none": 0.0411391498118926 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.43915343915343913, + "acc_stderr,none": 0.025559920550531003 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7322580645161291, + "acc_stderr,none": 
0.025189006660212374 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4433497536945813, + "acc_stderr,none": 0.03495334582162933 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.68, + "acc_stderr,none": 0.04688261722621505 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.36666666666666664, + "acc_stderr,none": 0.029381620726465073 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.47019867549668876, + "acc_stderr,none": 0.040752249922169775 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.46296296296296297, + "acc_stderr,none": 0.03400603625538272 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.4642857142857143, + "acc_stderr,none": 0.04733667890053756 + }, + "harness|arc:easy|0": { + "acc,none": 0.8106060606060606, + "acc_stderr,none": 0.00804000196687019, + "acc_norm,none": 0.7803030303030303, + "acc_norm_stderr,none": 0.00849594853792877, + "alias": "arc_easy" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5273037542662116, + "acc_stderr,none": 0.014589589101985996, + "acc_norm,none": 0.5520477815699659, + "acc_norm_stderr,none": 0.014532011498211676, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.3537331701346389, + "acc_stderr,none": 0.016737814358846147, + "alias": "truthfulqa_mc1" + }, + "harness|hellaswag|0": { + "acc,none": 0.5732921728739295, + "acc_stderr,none": 0.004935882666250482, + "acc_norm,none": 0.7543318064130651, + "acc_norm_stderr,none": 0.004296028885089522, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5153575418054068, + "acc_stderr,none": 0.015290431258948651, + "alias": "truthfulqa_mc2" + }, + "harness|openbookqa|0": { + "acc,none": 0.344, + "acc_stderr,none": 0.02126575803797874, + "acc_norm,none": 0.43, + "acc_norm_stderr,none": 0.02216263442665284, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.7736670293797606, + "acc_stderr,none": 0.00976329424687942, + "acc_norm,none": 0.7818280739934712, + "acc_norm_stderr,none": 0.009636081958374381, + "alias": "piqa" + }, + "harness|winogrande|0": { + "acc,none": 0.734017363851618, + "acc_stderr,none": 0.012418323153051043, + "alias": "winogrande" + } + }, + "task_info": { + "model": "solidrust/Meta-Llama-3-8B-Instruct-hf-AWQ", + "revision": "main", + "private": false, + "params": 7.94, + "architectures": "LlamaForCausalLM", + "quant_type": "AWQ", + "precision": "4bit", + "model_params": 15.88, + "model_size": 7.94, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-27T15:54:35Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bits": 4, + "group_size": 128, + "modules_to_not_convert": null, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "versions": { + "harness|lambada:openai|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + 
"harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|winogrande|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + 
"mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714252073.7108362, + "config": { + "model": "hf", + "model_args": "pretrained=solidrust/Meta-Llama-3-8B-Instruct-hf-AWQ,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/styalai/results_2024-05-10-13-56-07.json b/styalai/results_2024-05-10-13-56-07.json new file mode 100644 index 0000000000000000000000000000000000000000..3fee0c92a632e4a17cc5aa2400b87f9cafe2df6f --- /dev/null +++ b/styalai/results_2024-05-10-13-56-07.json @@ -0,0 +1,599 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-10-13-56-07", + "total_evaluation_time_secondes": "", + "model_name": "styalai/phi-2_quantize_gptq", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 1.84, + "model_params": 2.7, + "quant_type": "GPTQ", + "precision": "4bit" + }, + "results": { + "harness|arc:easy|0": { + "acc,none": 0.7975589225589226, + "acc_stderr,none": 0.008245156475629189, + "acc_norm,none": 0.7769360269360269, + "acc_norm_stderr,none": 0.00854231416900937, + "alias": "arc_easy" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.4361786144689832, + "acc_stderr,none": 0.014817402443817856, + "alias": "truthfulqa_mc2" + }, + "harness|boolq|0": { + "acc,none": 0.8067278287461773, + "acc_stderr,none": 0.006906224976787107, + "alias": "boolq" + }, + "harness|piqa|0": { + "acc,none": 0.7763873775843307, + "acc_stderr,none": 0.009721489519176294, + "acc_norm,none": 0.7850924918389554, + "acc_norm_stderr,none": 0.009583665082653313, + "alias": "piqa" + }, + "harness|arc:challenge|0": { + "acc,none": 0.515358361774744, + "acc_stderr,none": 0.01460449612939491, + "acc_norm,none": 0.5264505119453925, + "acc_norm_stderr,none": 0.014590931358120169, + "alias": "arc_challenge" + }, + "harness|openbookqa|0": { + "acc,none": 0.39, + "acc_stderr,none": 0.02183468586936921, + "acc_norm,none": 0.492, + "acc_norm_stderr,none": 0.022380208834928028, + "alias": "openbookqa" + }, + "harness|winogrande|0": { + "acc,none": 0.7545382794001578, + "acc_stderr,none": 0.012095272937183642, + "alias": "winogrande" + }, 
+ "harness|lambada:openai|0": { + "perplexity,none": 6.732271716226398, + "perplexity_stderr,none": 0.18372924037735205, + "acc,none": 0.5982922569377062, + "acc_stderr,none": 0.006830049724301386, + "alias": "lambada_openai" + }, + "harness|mmlu|0": { + "acc,none": 0.5225751317476143, + "acc_stderr,none": 0.004019126427264933, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.4684378320935175, + "acc_stderr,none": 0.0068897753578505095 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.30952380952380953, + "acc_stderr,none": 0.04134913018303316 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.6727272727272727, + "acc_stderr,none": 0.03663974994391243 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.6078431372549019, + "acc_stderr,none": 0.03426712349247272 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7088607594936709, + "acc_stderr,none": 0.02957160106575337 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7272727272727273, + "acc_stderr,none": 0.04065578140908705 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.04557239513497751 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.6871165644171779, + "acc_stderr,none": 0.03642914578292405 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6069364161849711, + "acc_stderr,none": 0.02629622791561367 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.23798882681564246, + "acc_stderr,none": 0.014242630070574898 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5530546623794212, + "acc_stderr,none": 0.028237769422085335 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5709876543209876, + "acc_stderr,none": 0.027538925613470863 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.3891786179921773, + "acc_stderr,none": 0.012452613934286991 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.6608187134502924, + "acc_stderr,none": 0.03631053496488905 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5864177663340844, + "acc_stderr,none": 0.008596531783217074 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.53, + "acc_stderr,none": 0.050161355804659205 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6226415094339622, + "acc_stderr,none": 0.02983280811479601 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.48554913294797686, + "acc_stderr,none": 0.03810871630454764 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6278026905829597, + "acc_stderr,none": 0.032443052830087304 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7184466019417476, + "acc_stderr,none": 0.04453254836326467 + }, + "harness|mmlu_marketing|0": { + "alias": " - 
marketing", + "acc,none": 0.7777777777777778, + "acc_stderr,none": 0.02723601394619669 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.56, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.6871008939974457, + "acc_stderr,none": 0.016580935940304055 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.5849673202614379, + "acc_stderr,none": 0.0282135041778241 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4078014184397163, + "acc_stderr,none": 0.029316011776343555 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.45588235294117646, + "acc_stderr,none": 0.03025437257397669 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.46987951807228917, + "acc_stderr,none": 0.03885425420866767 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6269093272668184, + "acc_stderr,none": 0.00849603785258372 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.2631578947368421, + "acc_stderr,none": 0.041424397194893624 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.6919191919191919, + "acc_stderr,none": 0.03289477330098615 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7409326424870466, + "acc_stderr,none": 0.03161877917935411 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5461538461538461, + "acc_stderr,none": 0.025242770987126177 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.542016806722689, + "acc_stderr,none": 0.03236361111951941 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7431192660550459, + "acc_stderr,none": 0.01873249292834248 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6412213740458015, + "acc_stderr,none": 0.04206739313864908 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5473856209150327, + "acc_stderr,none": 0.020136790918492523 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6181818181818182, + "acc_stderr,none": 0.046534298079135075 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6448979591836734, + "acc_stderr,none": 0.03063565515038764 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7611940298507462, + "acc_stderr,none": 0.03014777593540922 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.74, + "acc_stderr,none": 0.0440844002276808 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4386298763082778, + "acc_stderr,none": 0.008612772747917762 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.22, + "acc_stderr,none": 0.04163331998932269 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.45185185185185184, + "acc_stderr,none": 0.04299268905480864 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.5131578947368421, + 
"acc_stderr,none": 0.04067533136309172 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.5486111111111112, + "acc_stderr,none": 0.04161402398403279 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.36, + "acc_stderr,none": 0.04824181513244218 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.34, + "acc_stderr,none": 0.04760952285695235 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.27450980392156865, + "acc_stderr,none": 0.04440521906179325 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.62, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.46808510638297873, + "acc_stderr,none": 0.03261936918467382 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.47586206896551725, + "acc_stderr,none": 0.041618085035015295 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.37037037037037035, + "acc_stderr,none": 0.024870815251057093 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6387096774193548, + "acc_stderr,none": 0.027327548447957543 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.46798029556650245, + "acc_stderr,none": 0.035107665979592154 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.63, + "acc_stderr,none": 0.04852365870939099 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.25925925925925924, + "acc_stderr,none": 0.026719240783712163 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.3576158940397351, + "acc_stderr,none": 0.03913453431177258 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.42592592592592593, + "acc_stderr,none": 0.033723432716530624 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.4642857142857143, + "acc_stderr,none": 0.04733667890053756 + }, + "harness|hellaswag|0": { + "acc,none": 0.5454092810197172, + "acc_stderr,none": 0.004969160917379662, + "acc_norm,none": 0.7279426409081856, + "acc_norm_stderr,none": 0.004441097782370478, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2962056303549572, + "acc_stderr,none": 0.01598359510181139, + "alias": "truthfulqa_mc1" + } + }, + "task_info": { + "model": "styalai/phi-2_quantize_gptq", + "revision": "main", + "private": false, + "params": 6.244, + "architectures": "PhiForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 2.7, + "model_size": 1.84, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-29T07:14:10Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "batch_size": 1, + "bits": 4, + "block_name_to_quantize": null, + 
"cache_block_outputs": true, + "damp_percent": 0.1, + "dataset": "c4", + "desc_act": false, + "exllama_config": { + "version": 1 + }, + "group_size": 128, + "max_input_length": null, + "model_seqlen": null, + "module_name_preceding_first_block": null, + "modules_in_block_to_quantize": null, + "pad_token_id": null, + "quant_method": "gptq", + "sym": true, + "tokenizer": null, + "true_sequential": true, + "use_cuda_fp16": false, + "use_exllama": false + }, + "versions": { + "harness|arc:easy|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|boolq|0": 2.0, + "harness|piqa|0": 1.0, + "harness|arc:challenge|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + 
"mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1715316065.7549846, + "config": { + "model": "hf", + "model_args": "pretrained=styalai/phi-2_quantize_gptq,trust_remote_code=False,dtype=float16,_commit_hash=main", + "batch_size": 4, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} diff --git a/unsloth/results_2024-04-27-01-16-55.json b/unsloth/results_2024-04-27-01-16-55.json new file mode 100644 index 0000000000000000000000000000000000000000..068e1545c9b869cae979314423082511afc52c72 --- /dev/null +++ b/unsloth/results_2024-04-27-01-16-55.json @@ -0,0 +1,589 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-27-01-16-55", + "total_evaluation_time_secondes": "", + "model_name": "unsloth/llama-2-7b-chat-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 3.87, + "model_params": 6.68, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|piqa|0": { + "acc,none": 0.764417845484222, + "acc_stderr,none": 0.009901067586473912, + "acc_norm,none": 0.76550598476605, + "acc_norm_stderr,none": 0.00988520314324055, + "alias": "piqa" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.2937576499388005, + "acc_stderr,none": 0.015945068581236614, + "alias": "truthfulqa_mc1" + }, + "harness|winogrande|0": { + "acc,none": 0.6669297553275454, + "acc_stderr,none": 0.01324619402807065, + "alias": 
"winogrande" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.45035586657845295, + "acc_stderr,none": 0.01565376338587469, + "alias": "truthfulqa_mc2" + }, + "harness|arc:easy|0": { + "acc,none": 0.7369528619528619, + "acc_stderr,none": 0.009034514898865822, + "acc_norm,none": 0.6982323232323232, + "acc_norm_stderr,none": 0.009418994158522528, + "alias": "arc_easy" + }, + "harness|mmlu|0": { + "acc,none": 0.45107534539239424, + "acc_stderr,none": 0.004041496994837459, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.42422954303931987, + "acc_stderr,none": 0.006916469882448509 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.2619047619047619, + "acc_stderr,none": 0.039325376803928704 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.5575757575757576, + "acc_stderr,none": 0.03878372113711274 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.6715686274509803, + "acc_stderr,none": 0.03296245110172229 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.6118143459915611, + "acc_stderr,none": 0.031722950043323296 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.5867768595041323, + "acc_stderr,none": 0.04495087843548408 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.5370370370370371, + "acc_stderr,none": 0.04820403072760627 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.5214723926380368, + "acc_stderr,none": 0.0392474687675113 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.4884393063583815, + "acc_stderr,none": 0.026911898686377906 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.5369774919614148, + "acc_stderr,none": 0.02832032583010592 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.5524691358024691, + "acc_stderr,none": 0.027667138569422708 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.34485006518904826, + "acc_stderr,none": 0.012139881006287049 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.036155076303109344 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.5352429996781461, + "acc_stderr,none": 0.008705350844769753 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956912 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.5358490566037736, + "acc_stderr,none": 0.030693675018458006 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.3872832369942196, + "acc_stderr,none": 0.037143259063020656 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145632 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.5964125560538116, + "acc_stderr,none": 0.03292802819330313 + }, + "harness|mmlu_management|0": { + "alias": " - 
management", + "acc,none": 0.6213592233009708, + "acc_stderr,none": 0.04802694698258975 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.7051282051282052, + "acc_stderr,none": 0.02987257770889118 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.46, + "acc_stderr,none": 0.05009082659620332 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.6666666666666666, + "acc_stderr,none": 0.016857391247472552 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.4934640522875817, + "acc_stderr,none": 0.02862747055055606 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.35106382978723405, + "acc_stderr,none": 0.028473501272963758 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.4227941176470588, + "acc_stderr,none": 0.030008562845003476 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.42168674698795183, + "acc_stderr,none": 0.03844453181770918 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.5079623009424764, + "acc_stderr,none": 0.008792891268502986 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.3157894736842105, + "acc_stderr,none": 0.04372748290278007 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.5808080808080808, + "acc_stderr,none": 0.03515520728670417 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.6476683937823834, + "acc_stderr,none": 0.03447478286414357 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.37948717948717947, + "acc_stderr,none": 0.024603626924097417 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.37815126050420167, + "acc_stderr,none": 0.031499305777849054 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.5926605504587156, + "acc_stderr,none": 0.021065986244412888 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.5419847328244275, + "acc_stderr,none": 0.04369802690578757 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.4362745098039216, + "acc_stderr,none": 0.02006287424353913 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.5181818181818182, + "acc_stderr,none": 0.04785964010794916 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.47346938775510206, + "acc_stderr,none": 0.03196412734523272 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7014925373134329, + "acc_stderr,none": 0.032357437893550424 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.74, + "acc_stderr,none": 0.0440844002276808 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.3526799873136695, + "acc_stderr,none": 0.008379717025521196 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.0446196043338474 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 
0.42962962962962964, + "acc_stderr,none": 0.04276349494376599 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.4605263157894737, + "acc_stderr,none": 0.04056242252249033 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.4861111111111111, + "acc_stderr,none": 0.04179596617581 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.25, + "acc_stderr,none": 0.04351941398892446 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.28, + "acc_stderr,none": 0.04512608598542127 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.3, + "acc_stderr,none": 0.046056618647183814 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.21568627450980393, + "acc_stderr,none": 0.040925639582376536 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.55, + "acc_stderr,none": 0.05 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.3829787234042553, + "acc_stderr,none": 0.031778212502369216 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.42758620689655175, + "acc_stderr,none": 0.04122737111370331 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.2830687830687831, + "acc_stderr,none": 0.023201392938194978 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.47096774193548385, + "acc_stderr,none": 0.028396016402761 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.33497536945812806, + "acc_stderr,none": 0.033208527423483104 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.43, + "acc_stderr,none": 0.049756985195624284 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.2814814814814815, + "acc_stderr,none": 0.027420019350945277 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.2582781456953642, + "acc_stderr,none": 0.035737053147634576 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.25925925925925924, + "acc_stderr,none": 0.029886910547626964 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.35714285714285715, + "acc_stderr,none": 0.04547960999764376 + }, + "harness|openbookqa|0": { + "acc,none": 0.34, + "acc_stderr,none": 0.021206117013673063, + "acc_norm,none": 0.424, + "acc_norm_stderr,none": 0.022122993778135404, + "alias": "openbookqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.3804927836594665, + "perplexity_stderr,none": 0.09041913950351517, + "acc,none": 0.7023093343683291, + "acc_stderr,none": 0.006370285573012031, + "alias": "lambada_openai" + }, + "harness|hellaswag|0": { + "acc,none": 0.5731925911173074, + "acc_stderr,none": 0.004936029827672042, + "acc_norm,none": 0.7499502091216889, + "acc_norm_stderr,none": 0.004321564303822486, + "alias": "hellaswag" + }, + "harness|boolq|0": { + "acc,none": 0.7948012232415902, + "acc_stderr,none": 0.007063324955682799, + "alias": "boolq" + }, + "harness|arc:challenge|0": { + "acc,none": 0.4257679180887372, + "acc_stderr,none": 
0.014449464278868802, + "acc_norm,none": 0.4257679180887372, + "acc_norm_stderr,none": 0.014449464278868809, + "alias": "arc_challenge" + } + }, + "task_info": { + "model": "unsloth/llama-2-7b-chat-bnb-4bit", + "revision": "main", + "private": false, + "params": 14.408, + "architectures": "LlamaForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 28.816, + "model_size": 14.408, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-26T15:24:50Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "_load_in_4bit": true, + "_load_in_8bit": false, + "bnb_4bit_compute_dtype": "float16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "versions": { + "harness|piqa|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:easy|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + 
"harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|openbookqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|boolq|0": 2.0, + "harness|arc:challenge|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714145243.1145976, + "config": { + "model": "hf", + "model_args": "pretrained=unsloth/llama-2-7b-chat-bnb-4bit,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/unsloth/results_2024-04-28-19-17-21.json b/unsloth/results_2024-04-28-19-17-21.json new file mode 100644 index 0000000000000000000000000000000000000000..1944bb30fbb8839579852cf52cf1fc32dd52429e --- /dev/null +++ b/unsloth/results_2024-04-28-19-17-21.json @@ -0,0 +1,589 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-28-19-17-21", + "total_evaluation_time_secondes": "", + "model_name": 
"unsloth/llama-3-8b-Instruct-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.7, + "model_params": 7.2, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|arc:challenge|0": { + "acc,none": 0.5281569965870307, + "acc_stderr,none": 0.014588204105102202, + "acc_norm,none": 0.5494880546075085, + "acc_norm_stderr,none": 0.014539646098471627, + "alias": "arc_challenge" + }, + "harness|mmlu|0": { + "acc,none": 0.6140150975644495, + "acc_stderr,none": 0.003872725575522531, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5504782146652497, + "acc_stderr,none": 0.006754642643009837 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.47619047619047616, + "acc_stderr,none": 0.04467062628403273 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7333333333333333, + "acc_stderr,none": 0.03453131801885417 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7990196078431373, + "acc_stderr,none": 0.028125972265654362 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.8143459915611815, + "acc_stderr,none": 0.025310495376944867 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7024793388429752, + "acc_stderr,none": 0.04173349148083499 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.75, + "acc_stderr,none": 0.04186091791394607 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7423312883435583, + "acc_stderr,none": 0.03436150827846917 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6878612716763006, + "acc_stderr,none": 0.024946792225272314 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2770949720670391, + "acc_stderr,none": 0.014968772435812145 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.684887459807074, + "acc_stderr,none": 0.026385273703464492 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7191358024691358, + "acc_stderr,none": 0.025006469755799208 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4589308996088657, + "acc_stderr,none": 0.012727084826799797 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7602339181286549, + "acc_stderr,none": 0.03274485211946956 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6997103315094947, + "acc_stderr,none": 0.007946042040283596 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.69, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.7283018867924528, + "acc_stderr,none": 0.027377706624670713 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6184971098265896, + "acc_stderr,none": 0.03703851193099521 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.41, + "acc_stderr,none": 0.049431107042371025 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6457399103139013, + "acc_stderr,none": 0.03210062154134987 + }, + 
"harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8058252427184466, + "acc_stderr,none": 0.03916667762822584 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8803418803418803, + "acc_stderr,none": 0.021262719400406953 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.81, + "acc_stderr,none": 0.03942772444036623 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.789272030651341, + "acc_stderr,none": 0.014583812465862543 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.7091503267973857, + "acc_stderr,none": 0.02600480036395213 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4929078014184397, + "acc_stderr,none": 0.02982449855912901 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.7132352941176471, + "acc_stderr,none": 0.02747227447323382 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4939759036144578, + "acc_stderr,none": 0.03892212195333045 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.7182320441988951, + "acc_stderr,none": 0.007943824324050873 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.43859649122807015, + "acc_stderr,none": 0.04668000738510455 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7727272727272727, + "acc_stderr,none": 0.02985751567338641 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8393782383419689, + "acc_stderr,none": 0.026499057701397433 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.6256410256410256, + "acc_stderr,none": 0.024537591572830513 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.7226890756302521, + "acc_stderr,none": 0.029079374539480007 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.8128440366972477, + "acc_stderr,none": 0.016722684526200172 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.7633587786259542, + "acc_stderr,none": 0.037276735755969154 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6519607843137255, + "acc_stderr,none": 0.01927099870822398 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6363636363636364, + "acc_stderr,none": 0.046075820907199756 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6938775510204082, + "acc_stderr,none": 0.02950489645459597 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8258706467661692, + "acc_stderr,none": 0.026814951200421603 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.81, + "acc_stderr,none": 0.03942772444036623 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5226768157310498, + "acc_stderr,none": 0.008629280200619318 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_anatomy|0": { + "alias": 
" - anatomy", + "acc,none": 0.5777777777777777, + "acc_stderr,none": 0.04266763404099582 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6710526315789473, + "acc_stderr,none": 0.03823428969926604 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7430555555555556, + "acc_stderr,none": 0.03653946969442099 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.41, + "acc_stderr,none": 0.04943110704237102 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.48, + "acc_stderr,none": 0.050211673156867795 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.31, + "acc_stderr,none": 0.04648231987117316 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.47058823529411764, + "acc_stderr,none": 0.049665709039785295 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.74, + "acc_stderr,none": 0.044084400227680794 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5404255319148936, + "acc_stderr,none": 0.03257901482099835 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.6137931034482759, + "acc_stderr,none": 0.04057324734419036 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.43386243386243384, + "acc_stderr,none": 0.02552503438247489 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7064516129032258, + "acc_stderr,none": 0.025906087021319288 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.4729064039408867, + "acc_stderr,none": 0.03512819077876106 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.62, + "acc_stderr,none": 0.048783173121456316 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.35555555555555557, + "acc_stderr,none": 0.029185714949857403 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.45695364238410596, + "acc_stderr,none": 0.04067325174247443 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 0.034076320938540516 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.48214285714285715, + "acc_stderr,none": 0.047427623612430116 + }, + "harness|boolq|0": { + "acc,none": 0.826605504587156, + "acc_stderr,none": 0.006621545068373126, + "alias": "boolq" + }, + "harness|winogrande|0": { + "acc,none": 0.7198105761641673, + "acc_stderr,none": 0.012621707979798499, + "alias": "winogrande" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.35495716034271724, + "acc_stderr,none": 0.0167508623813759, + "alias": "truthfulqa_mc1" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.5187467156778944, + "acc_stderr,none": 0.01532066674841558, + "alias": "truthfulqa_mc2" + }, + "harness|openbookqa|0": { + "acc,none": 0.338, + "acc_stderr,none": 0.02117566569520941, + "acc_norm,none": 0.422, + "acc_norm_stderr,none": 0.022109039310618552, + "alias": "openbookqa" + }, + "harness|piqa|0": { + "acc,none": 0.7758433079434167, + 
"acc_stderr,none": 0.00972989795641004, + "acc_norm,none": 0.7758433079434167, + "acc_norm_stderr,none": 0.009729897956410034, + "alias": "piqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.239022847175918, + "perplexity_stderr,none": 0.08064531309398792, + "acc,none": 0.7158936541820299, + "acc_stderr,none": 0.006283140862669237, + "alias": "lambada_openai" + }, + "harness|hellaswag|0": { + "acc,none": 0.5704043019318861, + "acc_stderr,none": 0.00494006740203104, + "acc_norm,none": 0.7536347341167098, + "acc_norm_stderr,none": 0.0043001312233407534, + "alias": "hellaswag" + }, + "harness|arc:easy|0": { + "acc,none": 0.8093434343434344, + "acc_stderr,none": 0.008060472485266111, + "acc_norm,none": 0.7794612794612794, + "acc_norm_stderr,none": 0.008507616235669013, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "unsloth/llama-3-8b-Instruct-bnb-4bit", + "revision": "main", + "private": false, + "params": 18.6, + "architectures": "LlamaForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 37.2, + "model_size": 18.6, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-27T15:55:26Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "_load_in_4bit": true, + "_load_in_8bit": false, + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "versions": { + "harness|arc:challenge|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + 
"harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|boolq|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|piqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714280945.2252731, + "config": { + "model": "hf", + "model_args": "pretrained=unsloth/llama-3-8b-Instruct-bnb-4bit,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + 
"bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/unsloth/results_2024-04-29-09-35-26.json b/unsloth/results_2024-04-29-09-35-26.json new file mode 100644 index 0000000000000000000000000000000000000000..9374b26130d5571c6c514cf46d6f42625017fe71 --- /dev/null +++ b/unsloth/results_2024-04-29-09-35-26.json @@ -0,0 +1,587 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-04-29-09-35-26", + "total_evaluation_time_secondes": "", + "model_name": "unsloth/mistral-7b-instruct-v0.2-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 4.13, + "model_params": 7.2, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|truthfulqa:mc1|0": { + "acc,none": 0.4810281517747858, + "acc_stderr,none": 0.01749089640576235, + "alias": "truthfulqa_mc1" + }, + "harness|piqa|0": { + "acc,none": 0.7992383025027203, + "acc_stderr,none": 0.00934596167482341, + "acc_norm,none": 0.8014145810663765, + "acc_norm_stderr,none": 0.009307814521717889, + "alias": "piqa" + }, + "harness|winogrande|0": { + "acc,none": 0.7205998421468035, + "acc_stderr,none": 0.012610826539404681, + "alias": "winogrande" + }, + "harness|openbookqa|0": { + "acc,none": 0.34, + "acc_stderr,none": 0.021206117013673066, + "acc_norm,none": 0.456, + "acc_norm_stderr,none": 0.022296238348407063, + "alias": "openbookqa" + }, + "harness|hellaswag|0": { + "acc,none": 0.655646285600478, + "acc_stderr,none": 0.004741859753178436, + "acc_norm,none": 0.8330013941445927, + "acc_norm_stderr,none": 0.003722123709610337, + "alias": "hellaswag" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.6500702943191489, + "acc_stderr,none": 0.015376744726315254, + "alias": "truthfulqa_mc2" + }, + "harness|arc:challenge|0": { + "acc,none": 0.5443686006825939, + "acc_stderr,none": 0.014553749939306863, + "acc_norm,none": 0.5546075085324232, + "acc_norm_stderr,none": 0.014523987638344078, + "alias": "arc_challenge" + }, + "harness|lambada:openai|0": { + "perplexity,none": 3.4361763813899864, + "perplexity_stderr,none": 0.07436778363389848, + "acc,none": 0.7135649136425384, + "acc_stderr,none": 0.006298569473987371, + "alias": "lambada_openai" + }, + "harness|boolq|0": { + "acc,none": 0.8513761467889909, + "acc_stderr,none": 0.0062215348353496834, + "alias": "boolq" + }, + "harness|mmlu|0": { + "acc,none": 0.5832502492522432, + "acc_stderr,none": 0.003959408393105344, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.5424017003188097, + "acc_stderr,none": 0.0068715499250283345 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3412698412698413, + "acc_stderr,none": 0.04240799327574924 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7333333333333333, + "acc_stderr,none": 0.03453131801885417 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7549019607843137, + "acc_stderr,none": 0.030190282453501933 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.759493670886076, + "acc_stderr,none": 0.02782078198114968 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7272727272727273, + "acc_stderr,none": 
0.04065578140908705 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.043300437496507416 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7668711656441718, + "acc_stderr,none": 0.0332201579577674 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.6734104046242775, + "acc_stderr,none": 0.025248264774242826 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.36201117318435755, + "acc_stderr,none": 0.016073067350153084 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.6463022508038585, + "acc_stderr,none": 0.02715520810320087 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.6882716049382716, + "acc_stderr,none": 0.02577311116963045 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.4217731421121252, + "acc_stderr,none": 0.01261297436939098 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.7894736842105263, + "acc_stderr,none": 0.031267817146631786 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6504666881235919, + "acc_stderr,none": 0.00822417958072344 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.56, + "acc_stderr,none": 0.04988876515698589 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6566037735849056, + "acc_stderr,none": 0.02922452646912479 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.5606936416184971, + "acc_stderr,none": 0.03784271932887467 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.600896860986547, + "acc_stderr,none": 0.032867453125679603 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.7281553398058253, + "acc_stderr,none": 0.044052680241409216 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8675213675213675, + "acc_stderr,none": 0.02220930907316562 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.62, + "acc_stderr,none": 0.04878317312145632 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7867177522349936, + "acc_stderr,none": 0.014648172749593513 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6437908496732027, + "acc_stderr,none": 0.02742047766262925 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.4219858156028369, + "acc_stderr,none": 0.029462189233370597 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.625, + "acc_stderr,none": 0.029408372932278746 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.4879518072289157, + "acc_stderr,none": 0.038913644958358196 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6701332466688332, + "acc_stderr,none": 0.008254713883721222 + }, + "harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.41228070175438597, + "acc_stderr,none": 0.04630653203366596 + }, + 
"harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.03191178226713547 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.8186528497409327, + "acc_stderr,none": 0.027807032360686088 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5384615384615384, + "acc_stderr,none": 0.025275892070240634 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6176470588235294, + "acc_stderr,none": 0.03156663099215416 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7761467889908257, + "acc_stderr,none": 0.017871217767790215 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6946564885496184, + "acc_stderr,none": 0.04039314978724562 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.5882352941176471, + "acc_stderr,none": 0.019910377463105935 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6181818181818182, + "acc_stderr,none": 0.046534298079135075 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6816326530612244, + "acc_stderr,none": 0.029822533793982076 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.8208955223880597, + "acc_stderr,none": 0.027113286753111837 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.83, + "acc_stderr,none": 0.0377525168068637 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.4931810973675864, + "acc_stderr,none": 0.008672919042542734 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.27, + "acc_stderr,none": 0.044619604333847415 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.6, + "acc_stderr,none": 0.04232073695151589 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6447368421052632, + "acc_stderr,none": 0.038947344870133176 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.6458333333333334, + "acc_stderr,none": 0.039994111357535424 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.38, + "acc_stderr,none": 0.04878317312145633 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.51, + "acc_stderr,none": 0.05024183937956912 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.37, + "acc_stderr,none": 0.048523658709391 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.04878608714466996 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.48936170212765956, + "acc_stderr,none": 0.03267862331014063 + }, + "harness|mmlu_electrical_engineering|0": { + "alias": " - electrical_engineering", + "acc,none": 0.5517241379310345, + "acc_stderr,none": 0.04144311810878152 + }, + "harness|mmlu_elementary_mathematics|0": { 
+ "alias": " - elementary_mathematics", + "acc,none": 0.42857142857142855, + "acc_stderr,none": 0.025487187147859372 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.6645161290322581, + "acc_stderr,none": 0.026860206444724352 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.46798029556650245, + "acc_stderr,none": 0.03510766597959214 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.65, + "acc_stderr,none": 0.047937248544110196 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.337037037037037, + "acc_stderr,none": 0.02882088466625326 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.33774834437086093, + "acc_stderr,none": 0.038615575462551684 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.4583333333333333, + "acc_stderr,none": 0.033981108902946366 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.5089285714285714, + "acc_stderr,none": 0.04745033255489123 + }, + "harness|arc:easy|0": { + "acc,none": 0.8198653198653199, + "acc_stderr,none": 0.007885661261794777, + "acc_norm,none": 0.7718855218855218, + "acc_norm_stderr,none": 0.008610355160815555, + "alias": "arc_easy" + } + }, + "task_info": { + "model": "unsloth/mistral-7b-instruct-v0.2-bnb-4bit", + "revision": "main", + "private": false, + "params": 15.448, + "architectures": "MistralForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 30.896, + "model_size": 15.448, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-04-28T09:01:12Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "versions": { + "harness|truthfulqa:mc1|0": 2.0, + "harness|piqa|0": 1.0, + "harness|winogrande|0": 1.0, + "harness|openbookqa|0": 1.0, + "harness|hellaswag|0": 1.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|arc:challenge|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|boolq|0": 2.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + "harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + 
"harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|arc:easy|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + "mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 
0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714329970.6590977, + "config": { + "model": "hf", + "model_args": "pretrained=unsloth/mistral-7b-instruct-v0.2-bnb-4bit,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file diff --git a/unsloth/results_2024-05-02-07-09-50.json b/unsloth/results_2024-05-02-07-09-50.json new file mode 100644 index 0000000000000000000000000000000000000000..0038554cf81d876e8a1e9a642a717bd3e14b7efa --- /dev/null +++ b/unsloth/results_2024-05-02-07-09-50.json @@ -0,0 +1,589 @@ +{ + "config_general": { + "lighteval_sha": "1.4", + "num_few_shot_default": null, + "num_fewshot_seeds": null, + "override_batch_size": null, + "max_samples": null, + "job_id": -1, + "start_time": null, + "end_time": "2024-05-02-07-09-50", + "total_evaluation_time_secondes": "", + "model_name": "unsloth/gemma-7b-bnb-4bit", + "model_sha": "", + "model_dtype": "4bit", + "model_size": 5.57, + "model_params": 7.99, + "quant_type": "bitsandbytes", + "precision": "4bit" + }, + "results": { + "harness|arc:challenge|0": { + "acc,none": 0.5034129692832765, + "acc_stderr,none": 0.014611050403244084, + "acc_norm,none": 0.5315699658703071, + "acc_norm_stderr,none": 0.014582236460866965, + "alias": "arc_challenge" + }, + "harness|truthfulqa:mc1|0": { + "acc,none": 0.29253365973072215, + "acc_stderr,none": 0.015925597445286165, + "alias": "truthfulqa_mc1" + }, + "harness|winogrande|0": { + "acc,none": 0.7434885556432518, + "acc_stderr,none": 0.012273648008759984, + "alias": "winogrande" + }, + "harness|boolq|0": { + "acc,none": 0.7764525993883792, + "acc_stderr,none": 0.007286766045641763, + "alias": "boolq" + }, + "harness|truthfulqa:mc2|0": { + "acc,none": 0.41357536298239544, + "acc_stderr,none": 0.014304144616348345, + "alias": "truthfulqa_mc2" + }, + "harness|openbookqa|0": { + "acc,none": 0.33, + "acc_stderr,none": 0.021049612166134806, + "acc_norm,none": 0.45, + "acc_norm_stderr,none": 0.02227087748536044, + "alias": "openbookqa" + }, + "harness|arc:easy|0": { + "acc,none": 0.8093434343434344, + "acc_stderr,none": 0.008060472485266111, + "acc_norm,none": 0.7971380471380471, + "acc_norm_stderr,none": 0.008251544823606904, + "alias": "arc_easy" + }, + "harness|mmlu|0": { + "acc,none": 0.5893747329440251, + "acc_stderr,none": 0.0039235706021663475, + "alias": "mmlu" + }, + "harness|mmlu_humanities|0": { + "alias": " - humanities", + "acc,none": 0.528586609989373, + "acc_stderr,none": 0.006718092467683873 + }, + "harness|mmlu_formal_logic|0": { + "alias": " - formal_logic", + "acc,none": 0.3492063492063492, + "acc_stderr,none": 0.04263906892795132 + }, + "harness|mmlu_high_school_european_history|0": { + "alias": " - high_school_european_history", + "acc,none": 0.7272727272727273, + "acc_stderr,none": 0.0347769116216366 + }, + "harness|mmlu_high_school_us_history|0": { + "alias": " - high_school_us_history", + "acc,none": 0.7598039215686274, + "acc_stderr,none": 0.02998373305591362 + }, + "harness|mmlu_high_school_world_history|0": { + "alias": " - high_school_world_history", + "acc,none": 0.7805907172995781, + "acc_stderr,none": 
0.026939106581553945 + }, + "harness|mmlu_international_law|0": { + "alias": " - international_law", + "acc,none": 0.7355371900826446, + "acc_stderr,none": 0.04026187527591206 + }, + "harness|mmlu_jurisprudence|0": { + "alias": " - jurisprudence", + "acc,none": 0.6574074074074074, + "acc_stderr,none": 0.045879047413018105 + }, + "harness|mmlu_logical_fallacies|0": { + "alias": " - logical_fallacies", + "acc,none": 0.7239263803680982, + "acc_stderr,none": 0.03512385283705048 + }, + "harness|mmlu_moral_disputes|0": { + "alias": " - moral_disputes", + "acc,none": 0.653179190751445, + "acc_stderr,none": 0.025624723994030457 + }, + "harness|mmlu_moral_scenarios|0": { + "alias": " - moral_scenarios", + "acc,none": 0.2424581005586592, + "acc_stderr,none": 0.014333522059217887 + }, + "harness|mmlu_philosophy|0": { + "alias": " - philosophy", + "acc,none": 0.7106109324758842, + "acc_stderr,none": 0.025755865922632935 + }, + "harness|mmlu_prehistory|0": { + "alias": " - prehistory", + "acc,none": 0.7160493827160493, + "acc_stderr,none": 0.025089478523765137 + }, + "harness|mmlu_professional_law|0": { + "alias": " - professional_law", + "acc,none": 0.43741851368970014, + "acc_stderr,none": 0.012669813464935726 + }, + "harness|mmlu_world_religions|0": { + "alias": " - world_religions", + "acc,none": 0.8070175438596491, + "acc_stderr,none": 0.030267457554898465 + }, + "harness|mmlu_other|0": { + "alias": " - other", + "acc,none": 0.6655938204055359, + "acc_stderr,none": 0.008223636843621119 + }, + "harness|mmlu_business_ethics|0": { + "alias": " - business_ethics", + "acc,none": 0.63, + "acc_stderr,none": 0.04852365870939098 + }, + "harness|mmlu_clinical_knowledge|0": { + "alias": " - clinical_knowledge", + "acc,none": 0.6339622641509434, + "acc_stderr,none": 0.029647813539365235 + }, + "harness|mmlu_college_medicine|0": { + "alias": " - college_medicine", + "acc,none": 0.6069364161849711, + "acc_stderr,none": 0.03724249595817729 + }, + "harness|mmlu_global_facts|0": { + "alias": " - global_facts", + "acc,none": 0.4, + "acc_stderr,none": 0.049236596391733084 + }, + "harness|mmlu_human_aging|0": { + "alias": " - human_aging", + "acc,none": 0.6636771300448431, + "acc_stderr,none": 0.031708824268455005 + }, + "harness|mmlu_management|0": { + "alias": " - management", + "acc,none": 0.8446601941747572, + "acc_stderr,none": 0.03586594738573974 + }, + "harness|mmlu_marketing|0": { + "alias": " - marketing", + "acc,none": 0.8418803418803419, + "acc_stderr,none": 0.023902325549560406 + }, + "harness|mmlu_medical_genetics|0": { + "alias": " - medical_genetics", + "acc,none": 0.68, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_miscellaneous|0": { + "alias": " - miscellaneous", + "acc,none": 0.7739463601532567, + "acc_stderr,none": 0.01495745850433583 + }, + "harness|mmlu_nutrition|0": { + "alias": " - nutrition", + "acc,none": 0.6568627450980392, + "acc_stderr,none": 0.02718449890994161 + }, + "harness|mmlu_professional_accounting|0": { + "alias": " - professional_accounting", + "acc,none": 0.5035460992907801, + "acc_stderr,none": 0.02982674915328092 + }, + "harness|mmlu_professional_medicine|0": { + "alias": " - professional_medicine", + "acc,none": 0.5882352941176471, + "acc_stderr,none": 0.029896163033125478 + }, + "harness|mmlu_virology|0": { + "alias": " - virology", + "acc,none": 0.5, + "acc_stderr,none": 0.03892494720807614 + }, + "harness|mmlu_social_sciences|0": { + "alias": " - social_sciences", + "acc,none": 0.6756581085472864, + "acc_stderr,none": 0.00824343296417136 + }, + 
"harness|mmlu_econometrics|0": { + "alias": " - econometrics", + "acc,none": 0.37719298245614036, + "acc_stderr,none": 0.04559522141958216 + }, + "harness|mmlu_high_school_geography|0": { + "alias": " - high_school_geography", + "acc,none": 0.7727272727272727, + "acc_stderr,none": 0.029857515673386407 + }, + "harness|mmlu_high_school_government_and_politics|0": { + "alias": " - high_school_government_and_politics", + "acc,none": 0.7823834196891192, + "acc_stderr,none": 0.029778663037752954 + }, + "harness|mmlu_high_school_macroeconomics|0": { + "alias": " - high_school_macroeconomics", + "acc,none": 0.5615384615384615, + "acc_stderr,none": 0.02515826601686857 + }, + "harness|mmlu_high_school_microeconomics|0": { + "alias": " - high_school_microeconomics", + "acc,none": 0.6428571428571429, + "acc_stderr,none": 0.031124619309328177 + }, + "harness|mmlu_high_school_psychology|0": { + "alias": " - high_school_psychology", + "acc,none": 0.7926605504587156, + "acc_stderr,none": 0.017381415563608674 + }, + "harness|mmlu_human_sexuality|0": { + "alias": " - human_sexuality", + "acc,none": 0.6641221374045801, + "acc_stderr,none": 0.04142313771996665 + }, + "harness|mmlu_professional_psychology|0": { + "alias": " - professional_psychology", + "acc,none": 0.6062091503267973, + "acc_stderr,none": 0.019766211991073063 + }, + "harness|mmlu_public_relations|0": { + "alias": " - public_relations", + "acc,none": 0.6363636363636364, + "acc_stderr,none": 0.04607582090719976 + }, + "harness|mmlu_security_studies|0": { + "alias": " - security_studies", + "acc,none": 0.6775510204081633, + "acc_stderr,none": 0.029923100563683906 + }, + "harness|mmlu_sociology|0": { + "alias": " - sociology", + "acc,none": 0.7611940298507462, + "acc_stderr,none": 0.03014777593540922 + }, + "harness|mmlu_us_foreign_policy|0": { + "alias": " - us_foreign_policy", + "acc,none": 0.81, + "acc_stderr,none": 0.039427724440366234 + }, + "harness|mmlu_stem|0": { + "alias": " - stem", + "acc,none": 0.5207738661592134, + "acc_stderr,none": 0.008628221725616015 + }, + "harness|mmlu_abstract_algebra|0": { + "alias": " - abstract_algebra", + "acc,none": 0.33, + "acc_stderr,none": 0.047258156262526045 + }, + "harness|mmlu_anatomy|0": { + "alias": " - anatomy", + "acc,none": 0.5851851851851851, + "acc_stderr,none": 0.042561937679014075 + }, + "harness|mmlu_astronomy|0": { + "alias": " - astronomy", + "acc,none": 0.6776315789473685, + "acc_stderr,none": 0.03803510248351585 + }, + "harness|mmlu_college_biology|0": { + "alias": " - college_biology", + "acc,none": 0.7222222222222222, + "acc_stderr,none": 0.037455547914624576 + }, + "harness|mmlu_college_chemistry|0": { + "alias": " - college_chemistry", + "acc,none": 0.49, + "acc_stderr,none": 0.05024183937956913 + }, + "harness|mmlu_college_computer_science|0": { + "alias": " - college_computer_science", + "acc,none": 0.5, + "acc_stderr,none": 0.050251890762960605 + }, + "harness|mmlu_college_mathematics|0": { + "alias": " - college_mathematics", + "acc,none": 0.32, + "acc_stderr,none": 0.046882617226215034 + }, + "harness|mmlu_college_physics|0": { + "alias": " - college_physics", + "acc,none": 0.4019607843137255, + "acc_stderr,none": 0.048786087144669955 + }, + "harness|mmlu_computer_security|0": { + "alias": " - computer_security", + "acc,none": 0.66, + "acc_stderr,none": 0.04760952285695237 + }, + "harness|mmlu_conceptual_physics|0": { + "alias": " - conceptual_physics", + "acc,none": 0.5659574468085107, + "acc_stderr,none": 0.032400380867927465 + }, + "harness|mmlu_electrical_engineering|0": { 
+ "alias": " - electrical_engineering", + "acc,none": 0.5793103448275863, + "acc_stderr,none": 0.0411391498118926 + }, + "harness|mmlu_elementary_mathematics|0": { + "alias": " - elementary_mathematics", + "acc,none": 0.4312169312169312, + "acc_stderr,none": 0.0255064816981382 + }, + "harness|mmlu_high_school_biology|0": { + "alias": " - high_school_biology", + "acc,none": 0.7258064516129032, + "acc_stderr,none": 0.0253781399708852 + }, + "harness|mmlu_high_school_chemistry|0": { + "alias": " - high_school_chemistry", + "acc,none": 0.541871921182266, + "acc_stderr,none": 0.03505630140785741 + }, + "harness|mmlu_high_school_computer_science|0": { + "alias": " - high_school_computer_science", + "acc,none": 0.58, + "acc_stderr,none": 0.049604496374885836 + }, + "harness|mmlu_high_school_mathematics|0": { + "alias": " - high_school_mathematics", + "acc,none": 0.3296296296296296, + "acc_stderr,none": 0.028661201116524575 + }, + "harness|mmlu_high_school_physics|0": { + "alias": " - high_school_physics", + "acc,none": 0.41721854304635764, + "acc_stderr,none": 0.040261414976346104 + }, + "harness|mmlu_high_school_statistics|0": { + "alias": " - high_school_statistics", + "acc,none": 0.5185185185185185, + "acc_stderr,none": 0.034076320938540516 + }, + "harness|mmlu_machine_learning|0": { + "alias": " - machine_learning", + "acc,none": 0.42857142857142855, + "acc_stderr,none": 0.04697113923010212 + }, + "harness|piqa|0": { + "acc,none": 0.7927094668117519, + "acc_stderr,none": 0.00945784469995238, + "acc_norm,none": 0.8101196953210011, + "acc_norm_stderr,none": 0.009150819250948718, + "alias": "piqa" + }, + "harness|lambada:openai|0": { + "perplexity,none": 4.447799682451166, + "perplexity_stderr,none": 0.09580496660000092, + "acc,none": 0.6565107704249952, + "acc_stderr,none": 0.006615909601014025, + "alias": "lambada_openai" + }, + "harness|hellaswag|0": { + "acc,none": 0.5889265086636128, + "acc_stderr,none": 0.004910229643262739, + "acc_norm,none": 0.7869946225851424, + "acc_norm_stderr,none": 0.00408594656444253, + "alias": "hellaswag" + } + }, + "task_info": { + "model": "unsloth/gemma-7b-bnb-4bit", + "revision": "main", + "private": false, + "params": 19.136, + "architectures": "GemmaForCausalLM", + "quant_type": "bitsandbytes", + "precision": "4bit", + "model_params": 38.272, + "model_size": 19.136, + "weight_dtype": "int4", + "compute_dtype": "float16", + "gguf_ftype": "*Q4_0.gguf", + "hardware": "gpu", + "status": "Pending", + "submitted_time": "2024-05-01T07:39:37Z", + "model_type": "quantization", + "job_id": -1, + "job_start_time": null, + "scripts": "ITREX" + }, + "quantization_config": { + "_load_in_4bit": true, + "_load_in_8bit": false, + "bnb_4bit_compute_dtype": "bfloat16", + "bnb_4bit_quant_type": "nf4", + "bnb_4bit_use_double_quant": true, + "llm_int8_enable_fp32_cpu_offload": false, + "llm_int8_has_fp16_weight": false, + "llm_int8_skip_modules": null, + "llm_int8_threshold": 6.0, + "load_in_4bit": true, + "load_in_8bit": false, + "quant_method": "bitsandbytes" + }, + "versions": { + "harness|arc:challenge|0": 1.0, + "harness|truthfulqa:mc1|0": 2.0, + "harness|winogrande|0": 1.0, + "harness|boolq|0": 2.0, + "harness|truthfulqa:mc2|0": 2.0, + "harness|openbookqa|0": 1.0, + "harness|arc:easy|0": 1.0, + "harness|mmlu|0": null, + "harness|mmlu_humanities|0": null, + "harness|mmlu_formal_logic|0": 0.0, + "harness|mmlu_high_school_european_history|0": 0.0, + "harness|mmlu_high_school_us_history|0": 0.0, + "harness|mmlu_high_school_world_history|0": 0.0, + 
"harness|mmlu_international_law|0": 0.0, + "harness|mmlu_jurisprudence|0": 0.0, + "harness|mmlu_logical_fallacies|0": 0.0, + "harness|mmlu_moral_disputes|0": 0.0, + "harness|mmlu_moral_scenarios|0": 0.0, + "harness|mmlu_philosophy|0": 0.0, + "harness|mmlu_prehistory|0": 0.0, + "harness|mmlu_professional_law|0": 0.0, + "harness|mmlu_world_religions|0": 0.0, + "harness|mmlu_other|0": null, + "harness|mmlu_business_ethics|0": 0.0, + "harness|mmlu_clinical_knowledge|0": 0.0, + "harness|mmlu_college_medicine|0": 0.0, + "harness|mmlu_global_facts|0": 0.0, + "harness|mmlu_human_aging|0": 0.0, + "harness|mmlu_management|0": 0.0, + "harness|mmlu_marketing|0": 0.0, + "harness|mmlu_medical_genetics|0": 0.0, + "harness|mmlu_miscellaneous|0": 0.0, + "harness|mmlu_nutrition|0": 0.0, + "harness|mmlu_professional_accounting|0": 0.0, + "harness|mmlu_professional_medicine|0": 0.0, + "harness|mmlu_virology|0": 0.0, + "harness|mmlu_social_sciences|0": null, + "harness|mmlu_econometrics|0": 0.0, + "harness|mmlu_high_school_geography|0": 0.0, + "harness|mmlu_high_school_government_and_politics|0": 0.0, + "harness|mmlu_high_school_macroeconomics|0": 0.0, + "harness|mmlu_high_school_microeconomics|0": 0.0, + "harness|mmlu_high_school_psychology|0": 0.0, + "harness|mmlu_human_sexuality|0": 0.0, + "harness|mmlu_professional_psychology|0": 0.0, + "harness|mmlu_public_relations|0": 0.0, + "harness|mmlu_security_studies|0": 0.0, + "harness|mmlu_sociology|0": 0.0, + "harness|mmlu_us_foreign_policy|0": 0.0, + "harness|mmlu_stem|0": null, + "harness|mmlu_abstract_algebra|0": 0.0, + "harness|mmlu_anatomy|0": 0.0, + "harness|mmlu_astronomy|0": 0.0, + "harness|mmlu_college_biology|0": 0.0, + "harness|mmlu_college_chemistry|0": 0.0, + "harness|mmlu_college_computer_science|0": 0.0, + "harness|mmlu_college_mathematics|0": 0.0, + "harness|mmlu_college_physics|0": 0.0, + "harness|mmlu_computer_security|0": 0.0, + "harness|mmlu_conceptual_physics|0": 0.0, + "harness|mmlu_electrical_engineering|0": 0.0, + "harness|mmlu_elementary_mathematics|0": 0.0, + "harness|mmlu_high_school_biology|0": 0.0, + "harness|mmlu_high_school_chemistry|0": 0.0, + "harness|mmlu_high_school_computer_science|0": 0.0, + "harness|mmlu_high_school_mathematics|0": 0.0, + "harness|mmlu_high_school_physics|0": 0.0, + "harness|mmlu_high_school_statistics|0": 0.0, + "harness|mmlu_machine_learning|0": 0.0, + "harness|piqa|0": 1.0, + "harness|lambada:openai|0": 1.0, + "harness|hellaswag|0": 1.0 + }, + "n-shot": { + "arc_challenge": 0, + "arc_easy": 0, + "boolq": 0, + "hellaswag": 0, + "lambada_openai": 0, + "mmlu": 0, + "mmlu_abstract_algebra": 0, + "mmlu_anatomy": 0, + "mmlu_astronomy": 0, + "mmlu_business_ethics": 0, + "mmlu_clinical_knowledge": 0, + "mmlu_college_biology": 0, + "mmlu_college_chemistry": 0, + "mmlu_college_computer_science": 0, + "mmlu_college_mathematics": 0, + "mmlu_college_medicine": 0, + "mmlu_college_physics": 0, + "mmlu_computer_security": 0, + "mmlu_conceptual_physics": 0, + "mmlu_econometrics": 0, + "mmlu_electrical_engineering": 0, + "mmlu_elementary_mathematics": 0, + "mmlu_formal_logic": 0, + "mmlu_global_facts": 0, + "mmlu_high_school_biology": 0, + "mmlu_high_school_chemistry": 0, + "mmlu_high_school_computer_science": 0, + "mmlu_high_school_european_history": 0, + "mmlu_high_school_geography": 0, + "mmlu_high_school_government_and_politics": 0, + "mmlu_high_school_macroeconomics": 0, + "mmlu_high_school_mathematics": 0, + "mmlu_high_school_microeconomics": 0, + "mmlu_high_school_physics": 0, + "mmlu_high_school_psychology": 0, + 
"mmlu_high_school_statistics": 0, + "mmlu_high_school_us_history": 0, + "mmlu_high_school_world_history": 0, + "mmlu_human_aging": 0, + "mmlu_human_sexuality": 0, + "mmlu_humanities": 0, + "mmlu_international_law": 0, + "mmlu_jurisprudence": 0, + "mmlu_logical_fallacies": 0, + "mmlu_machine_learning": 0, + "mmlu_management": 0, + "mmlu_marketing": 0, + "mmlu_medical_genetics": 0, + "mmlu_miscellaneous": 0, + "mmlu_moral_disputes": 0, + "mmlu_moral_scenarios": 0, + "mmlu_nutrition": 0, + "mmlu_other": 0, + "mmlu_philosophy": 0, + "mmlu_prehistory": 0, + "mmlu_professional_accounting": 0, + "mmlu_professional_law": 0, + "mmlu_professional_medicine": 0, + "mmlu_professional_psychology": 0, + "mmlu_public_relations": 0, + "mmlu_security_studies": 0, + "mmlu_social_sciences": 0, + "mmlu_sociology": 0, + "mmlu_stem": 0, + "mmlu_us_foreign_policy": 0, + "mmlu_virology": 0, + "mmlu_world_religions": 0, + "openbookqa": 0, + "piqa": 0, + "truthfulqa_mc1": 0, + "truthfulqa_mc2": 0, + "winogrande": 0 + }, + "date": 1714579759.9608762, + "config": { + "model": "hf", + "model_args": "pretrained=unsloth/gemma-7b-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", + "batch_size": 2, + "batch_sizes": [], + "device": "cuda", + "use_cache": null, + "limit": null, + "bootstrap_iters": 100000, + "gen_kwargs": null + } +} \ No newline at end of file