{ "config_general": { "start_date": "2024-02-14T17-44-29.767283", "start_time": 1707932670.347448, "end_time": 1707936677.2206705, "total_evaluation_time_seconds": 4006.873222351074, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "372b1c08d9b5b0fc18ce86bbf294930e26e66ed5", "model_dtype": "float16", "model_memory_footprint": 13857212480, "model_num_parameters": 6857302016, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 16, "max_length": 2048, "max_ctx_length": 2016, "max_gen_toks": 32, "model_name": "EleutherAI/pythia-6.9b-deduped", "job_id": 198, "model_id": "EleutherAI/pythia-6.9b-deduped_eval_request_False_float16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟢 : pretrained", "model_architectures": "GPTNeoXForCausalLM", "submitted_time": "2024-02-05T23:11:48Z", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }, "results": { "all_grouped_average": 0.24349621884502046, "all_grouped_npm": -0.056880866402642415, "all_grouped": { "enem_challenge": 0.20503848845346395, "bluex": 0.21835883171070933, "oab_exams": 0.2669703872437358, "assin2_rte": 0.33514630774633175, "assin2_sts": 0.015459703136524651, "faquad_nli": 0.5032594590990455, "sparrow_pt": 0.16024035452533222 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.20503848845346395, "harness|bluex|bluex|None|3": 0.21835883171070933, "harness|oab_exams|oab_exams|None|3": 0.2669703872437358, "harness|assin2_rte|assin2_rte|None|15": 0.33514630774633175, "harness|assin2_sts|assin2_sts|None|15": 0.015459703136524651, "harness|faquad_nli|faquad_nli|None|15": 0.5032594590990455, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.05071323767657713, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.3932038834951456, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.0671462829736211, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.12989801395598496 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.20503848845346395, "acc,exam_id__2015": 0.17647058823529413, "acc,exam_id__2012": 0.25862068965517243, "acc,exam_id__2017": 0.22413793103448276, "acc,exam_id__2009": 0.22608695652173913, "acc,exam_id__2023": 0.2, "acc,exam_id__2016_2": 0.17886178861788618, "acc,exam_id__2010": 0.18803418803418803, "acc,exam_id__2014": 0.21100917431192662, "acc,exam_id__2013": 0.2037037037037037, "acc,exam_id__2022": 0.17293233082706766, "acc,exam_id__2011": 0.21367521367521367, "acc,exam_id__2016": 0.21487603305785125, "main_score": 0.20503848845346395 }, "harness|bluex|bluex|None|3": { "acc,all": 0.21835883171070933, "acc,exam_id__UNICAMP_2022": 0.3076923076923077, "acc,exam_id__UNICAMP_2019": 0.16, "acc,exam_id__UNICAMP_2023": 0.3023255813953488, "acc,exam_id__UNICAMP_2018": 0.24074074074074073, "acc,exam_id__UNICAMP_2020": 0.2545454545454545, "acc,exam_id__USP_2024": 0.21951219512195122, "acc,exam_id__UNICAMP_2021_2": 0.1568627450980392, "acc,exam_id__USP_2022": 0.1836734693877551, "acc,exam_id__USP_2019": 0.25, "acc,exam_id__USP_2021": 0.23076923076923078, "acc,exam_id__USP_2023": 0.13636363636363635, "acc,exam_id__UNICAMP_2021_1": 0.21739130434782608, "acc,exam_id__UNICAMP_2024": 0.24444444444444444, "acc,exam_id__USP_2020": 0.25, "acc,exam_id__USP_2018": 0.14814814814814814, "main_score": 0.21835883171070933 }, 
"harness|oab_exams|oab_exams|None|3": { "acc,all": 0.2669703872437358, "acc,exam_id__2012-06a": 0.1875, "acc,exam_id__2015-17": 0.2564102564102564, "acc,exam_id__2012-06": 0.2375, "acc,exam_id__2014-15": 0.2692307692307692, "acc,exam_id__2016-20": 0.225, "acc,exam_id__2013-10": 0.2625, "acc,exam_id__2012-07": 0.2625, "acc,exam_id__2011-03": 0.25252525252525254, "acc,exam_id__2011-05": 0.1875, "acc,exam_id__2016-19": 0.21794871794871795, "acc,exam_id__2017-23": 0.2875, "acc,exam_id__2017-22": 0.2375, "acc,exam_id__2018-25": 0.275, "acc,exam_id__2014-13": 0.3125, "acc,exam_id__2017-24": 0.3, "acc,exam_id__2010-01": 0.29411764705882354, "acc,exam_id__2014-14": 0.3125, "acc,exam_id__2011-04": 0.3875, "acc,exam_id__2013-12": 0.1875, "acc,exam_id__2015-16": 0.2375, "acc,exam_id__2016-21": 0.3375, "acc,exam_id__2013-11": 0.225, "acc,exam_id__2012-08": 0.275, "acc,exam_id__2016-20a": 0.2375, "acc,exam_id__2012-09": 0.3116883116883117, "acc,exam_id__2010-02": 0.36, "acc,exam_id__2015-18": 0.25, "main_score": 0.2669703872437358 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.33514630774633175, "acc,all": 0.5008169934640523, "main_score": 0.33514630774633175 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.015459703136524651, "mse,all": 2.1141911764705887, "main_score": 0.015459703136524651 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.5032594590990455, "acc,all": 0.7707692307692308, "main_score": 0.5032594590990455 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.05071323767657713, "acc,all": 0.1, "main_score": 0.05071323767657713 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.3932038834951456, "acc,all": 0.648, "main_score": 0.3932038834951456 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.0671462829736211, "acc,all": 0.112, "main_score": 0.0671462829736211 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.12989801395598496, "acc,all": 0.242, "main_score": 0.12989801395598496 } }, "config_tasks": { "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }, "versions": { "all": 0, "harness|enem_challenge|enem_challenge": 1.0, "harness|bluex|bluex": 1.0, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1.0, "harness|assin2_sts|assin2_sts": 1.0, "harness|faquad_nli|faquad_nli": 1.0, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1.0, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1.0, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1.0, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1.0 }, "summary_tasks": { "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 15, "non_truncated": 1414, "padded": 0, "non_padded": 1429, "fewshots_truncated": 16, "mean_seq_length": 1542.0517844646606, "min_seq_length": 1291, "max_seq_length": 2503, "max_ctx_length": 2016, "max_gen_toks": 32, 
"mean_original_fewshots_size": 3.0, "mean_effective_fewshot_size": 2.9888033589923024 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 2, "non_truncated": 717, "padded": 0, "non_padded": 719, "fewshots_truncated": 2, "mean_seq_length": 1324.076495132128, "min_seq_length": 953, "max_seq_length": 2108, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3.0, "mean_effective_fewshot_size": 2.9972183588317107 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1324.5503416856493, "min_seq_length": 1061, "max_seq_length": 1789, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3.0, "mean_effective_fewshot_size": 3.0 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1259.0061274509803, "min_seq_length": 1236, "max_seq_length": 1325, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15.0, "mean_effective_fewshot_size": 15.0 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1392.0061274509803, "min_seq_length": 1369, "max_seq_length": 1458, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15.0, "mean_effective_fewshot_size": 15.0 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1540.8153846153846, "min_seq_length": 1487, "max_seq_length": 1650, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15.0, "mean_effective_fewshot_size": 15.0 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1744.524, "min_seq_length": 1723, "max_seq_length": 1777, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25.0, "mean_effective_fewshot_size": 25.0 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1716.684, "min_seq_length": 1693, "max_seq_length": 1754, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25.0, "mean_effective_fewshot_size": 25.0 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1425.294, "min_seq_length": 1408, "max_seq_length": 1461, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25.0, "mean_effective_fewshot_size": 25.0 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1577.602, "min_seq_length": 1560, "max_seq_length": 1607, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25.0, "mean_effective_fewshot_size": 25.0 } }, "summary_general": { "truncated": 17, "non_truncated": 11872, "padded": 0, "non_padded": 11889, "fewshots_truncated": 18 } }