{
  "config_general": {
    "start_date": "2024-02-15T05-17-17.944550",
    "start_time": 1707974238.5539687,
    "end_time": 1708079048.7124906,
    "total_evaluation_time_seconds": 104810.15852189064,
    "has_chat_template": false,
    "chat_type": null,
    "n_gpus": 1,
    "accelerate_num_process": null,
    "model_sha": "0de9394d54f8975e71838d309de1cb496c894ab9",
    "model_dtype": "bfloat16",
    "model_memory_footprint": 1184087040,
    "model_num_parameters": 592043520,
    "model_is_loaded_in_4bit": false,
    "model_is_loaded_in_8bit": false,
    "model_is_quantized": null,
    "model_device": "cuda:0",
    "batch_size": 16,
    "max_length": 2048,
    "max_ctx_length": 2048,
    "max_gen_toks": 32,
    "model_name": "google/umt5-base",
    "job_id": 206,
    "model_id": "google/umt5-base_eval_request_False_bfloat16_Original",
    "model_base_model": "",
    "model_weight_type": "Original",
    "model_revision": "main",
    "model_private": false,
    "model_type": "🟢 : pretrained",
    "model_architectures": "UMT5ForConditionalGeneration",
    "submitted_time": "2024-02-05T23:18:55Z",
    "lm_eval_model_type": "huggingface",
    "eval_version": "1.0.0"
  },
"results": { |
|
"all_grouped_average": 0.04776457359664892, |
|
"all_grouped_npm": -0.38758303220535195, |
|
"all_grouped": { |
|
"enem_challenge": 0.02939118264520644, |
|
"bluex": 0.022253129346314324, |
|
"oab_exams": 0.08337129840546698, |
|
"assin2_rte": 0.008049597487606372, |
|
"assin2_sts": 0.11702145458274152, |
|
"faquad_nli": 0.005177993527508091, |
|
"sparrow_pt": 0.06908735918169881 |
|
}, |
|
"all": { |
|
"harness|enem_challenge|enem_challenge|None|3": 0.02939118264520644, |
|
"harness|bluex|bluex|None|3": 0.022253129346314324, |
|
"harness|oab_exams|oab_exams|None|3": 0.08337129840546698, |
|
"harness|assin2_rte|assin2_rte|None|15": 0.008049597487606372, |
|
"harness|assin2_sts|assin2_sts|None|15": 0.11702145458274152, |
|
"harness|faquad_nli|faquad_nli|None|15": 0.005177993527508091, |
|
"harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.0, |
|
"harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.24175824175824176, |
|
"harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.0, |
|
"harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.03459119496855346 |
|
}, |
|
"harness|enem_challenge|enem_challenge|None|3": { |
|
"acc,all": 0.02939118264520644, |
|
"acc,exam_id__2015": 0.03361344537815126, |
|
"acc,exam_id__2012": 0.034482758620689655, |
|
"acc,exam_id__2017": 0.02586206896551724, |
|
"acc,exam_id__2009": 0.017391304347826087, |
|
"acc,exam_id__2023": 0.037037037037037035, |
|
"acc,exam_id__2016_2": 0.024390243902439025, |
|
"acc,exam_id__2010": 0.05128205128205128, |
|
"acc,exam_id__2014": 0.01834862385321101, |
|
"acc,exam_id__2013": 0.018518518518518517, |
|
"acc,exam_id__2022": 0.05263157894736842, |
|
"acc,exam_id__2011": 0.02564102564102564, |
|
"acc,exam_id__2016": 0.008264462809917356, |
|
"main_score": 0.02939118264520644 |
|
}, |
|
"harness|bluex|bluex|None|3": { |
|
"acc,all": 0.022253129346314324, |
|
"acc,exam_id__UNICAMP_2022": 0.0, |
|
"acc,exam_id__UNICAMP_2019": 0.06, |
|
"acc,exam_id__UNICAMP_2023": 0.023255813953488372, |
|
"acc,exam_id__UNICAMP_2018": 0.0, |
|
"acc,exam_id__UNICAMP_2020": 0.05454545454545454, |
|
"acc,exam_id__USP_2024": 0.0, |
|
"acc,exam_id__UNICAMP_2021_2": 0.0, |
|
"acc,exam_id__USP_2022": 0.0, |
|
"acc,exam_id__USP_2019": 0.0, |
|
"acc,exam_id__USP_2021": 0.019230769230769232, |
|
"acc,exam_id__USP_2023": 0.09090909090909091, |
|
"acc,exam_id__UNICAMP_2021_1": 0.021739130434782608, |
|
"acc,exam_id__UNICAMP_2024": 0.022222222222222223, |
|
"acc,exam_id__USP_2020": 0.017857142857142856, |
|
"acc,exam_id__USP_2018": 0.018518518518518517, |
|
"main_score": 0.022253129346314324 |
|
}, |
|
"harness|oab_exams|oab_exams|None|3": { |
|
"acc,all": 0.08337129840546698, |
|
"acc,exam_id__2012-06a": 0.1, |
|
"acc,exam_id__2015-17": 0.10256410256410256, |
|
"acc,exam_id__2012-06": 0.0625, |
|
"acc,exam_id__2014-15": 0.07692307692307693, |
|
"acc,exam_id__2016-20": 0.075, |
|
"acc,exam_id__2013-10": 0.0875, |
|
"acc,exam_id__2012-07": 0.025, |
|
"acc,exam_id__2011-03": 0.10101010101010101, |
|
"acc,exam_id__2011-05": 0.1125, |
|
"acc,exam_id__2016-19": 0.038461538461538464, |
|
"acc,exam_id__2017-23": 0.1, |
|
"acc,exam_id__2017-22": 0.0625, |
|
"acc,exam_id__2018-25": 0.1, |
|
"acc,exam_id__2014-13": 0.05, |
|
"acc,exam_id__2017-24": 0.0625, |
|
"acc,exam_id__2010-01": 0.12941176470588237, |
|
"acc,exam_id__2014-14": 0.125, |
|
"acc,exam_id__2011-04": 0.0625, |
|
"acc,exam_id__2013-12": 0.0625, |
|
"acc,exam_id__2015-16": 0.1, |
|
"acc,exam_id__2016-21": 0.075, |
|
"acc,exam_id__2013-11": 0.075, |
|
"acc,exam_id__2012-08": 0.025, |
|
"acc,exam_id__2016-20a": 0.1375, |
|
"acc,exam_id__2012-09": 0.07792207792207792, |
|
"acc,exam_id__2010-02": 0.11, |
|
"acc,exam_id__2015-18": 0.1, |
|
"main_score": 0.08337129840546698 |
|
}, |
|
"harness|assin2_rte|assin2_rte|None|15": { |
|
"f1_macro,all": 0.008049597487606372, |
|
"acc,all": 0.006127450980392157, |
|
"main_score": 0.008049597487606372 |
|
}, |
|
"harness|assin2_sts|assin2_sts|None|15": { |
|
"pearson,all": 0.11702145458274152, |
|
"mse,all": 4.536915664667917, |
|
"main_score": 0.11702145458274152 |
|
}, |
|
"harness|faquad_nli|faquad_nli|None|15": { |
|
"f1_macro,all": 0.005177993527508091, |
|
"acc,all": 0.006153846153846154, |
|
"main_score": 0.005177993527508091 |
|
}, |
|
"harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { |
|
"f1_macro,all": 0.0, |
|
"acc,all": 0.0, |
|
"main_score": 0.0 |
|
}, |
|
"harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { |
|
"f1_macro,all": 0.24175824175824176, |
|
"acc,all": 0.528, |
|
"main_score": 0.24175824175824176 |
|
}, |
|
"harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { |
|
"f1_macro,all": 0.0, |
|
"acc,all": 0.0, |
|
"main_score": 0.0 |
|
}, |
|
"harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { |
|
"f1_macro,all": 0.03459119496855346, |
|
"acc,all": 0.022, |
|
"main_score": 0.03459119496855346 |
|
} |
|
}, |
|
"config_tasks": { |
|
"harness|enem_challenge|enem_challenge": "LM Harness task", |
|
"harness|bluex|bluex": "LM Harness task", |
|
"harness|oab_exams|oab_exams": "LM Harness task", |
|
"harness|assin2_rte|assin2_rte": "LM Harness task", |
|
"harness|assin2_sts|assin2_sts": "LM Harness task", |
|
"harness|faquad_nli|faquad_nli": "LM Harness task", |
|
"harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", |
|
"harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", |
|
"harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", |
|
"harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" |
|
}, |
|
"versions": { |
|
"all": 0, |
|
"harness|enem_challenge|enem_challenge": 1.0, |
|
"harness|bluex|bluex": 1.0, |
|
"harness|oab_exams|oab_exams": 1.4, |
|
"harness|assin2_rte|assin2_rte": 1.0, |
|
"harness|assin2_sts|assin2_sts": 1.0, |
|
"harness|faquad_nli|faquad_nli": 1.0, |
|
"harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1.0, |
|
"harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1.0, |
|
"harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1.0, |
|
"harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1.0 |
|
}, |
|
"summary_tasks": { |
|
"harness|enem_challenge|enem_challenge|None|3": { |
|
"sample_size": 1429, |
|
"truncated": 2, |
|
"non_truncated": 1427, |
|
"padded": 0, |
|
"non_padded": 1429, |
|
"fewshots_truncated": 2, |
|
"mean_seq_length": 1207.7872638208537, |
|
"min_seq_length": 1012, |
|
"max_seq_length": 2293, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 3.0, |
|
"mean_effective_fewshot_size": 2.998600419874038 |
|
}, |
|
"harness|bluex|bluex|None|3": { |
|
"sample_size": 719, |
|
"truncated": 0, |
|
"non_truncated": 719, |
|
"padded": 0, |
|
"non_padded": 719, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 1164.269819193324, |
|
"min_seq_length": 890, |
|
"max_seq_length": 1696, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 3.0, |
|
"mean_effective_fewshot_size": 3.0 |
|
}, |
|
"harness|oab_exams|oab_exams|None|3": { |
|
"sample_size": 2195, |
|
"truncated": 0, |
|
"non_truncated": 2195, |
|
"padded": 0, |
|
"non_padded": 2195, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 953.780410022779, |
|
"min_seq_length": 762, |
|
"max_seq_length": 1264, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 3.0, |
|
"mean_effective_fewshot_size": 3.0 |
|
}, |
|
"harness|assin2_rte|assin2_rte|None|15": { |
|
"sample_size": 2448, |
|
"truncated": 0, |
|
"non_truncated": 2448, |
|
"padded": 0, |
|
"non_padded": 2448, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 928.1200980392157, |
|
"min_seq_length": 912, |
|
"max_seq_length": 976, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 15.0, |
|
"mean_effective_fewshot_size": 15.0 |
|
}, |
|
"harness|assin2_sts|assin2_sts|None|15": { |
|
"sample_size": 2448, |
|
"truncated": 0, |
|
"non_truncated": 2448, |
|
"padded": 0, |
|
"non_padded": 2448, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 1196.1200980392157, |
|
"min_seq_length": 1180, |
|
"max_seq_length": 1244, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 15.0, |
|
"mean_effective_fewshot_size": 15.0 |
|
}, |
|
"harness|faquad_nli|faquad_nli|None|15": { |
|
"sample_size": 650, |
|
"truncated": 0, |
|
"non_truncated": 650, |
|
"padded": 0, |
|
"non_padded": 650, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 1074.863076923077, |
|
"min_seq_length": 1040, |
|
"max_seq_length": 1144, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 15.0, |
|
"mean_effective_fewshot_size": 15.0 |
|
}, |
|
"harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { |
|
"sample_size": 500, |
|
"truncated": 0, |
|
"non_truncated": 500, |
|
"padded": 0, |
|
"non_padded": 500, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 1321.456, |
|
"min_seq_length": 1306, |
|
"max_seq_length": 1343, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 25.0, |
|
"mean_effective_fewshot_size": 25.0 |
|
}, |
|
"harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { |
|
"sample_size": 500, |
|
"truncated": 0, |
|
"non_truncated": 500, |
|
"padded": 0, |
|
"non_padded": 500, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 1305.35, |
|
"min_seq_length": 1286, |
|
"max_seq_length": 1338, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 25.0, |
|
"mean_effective_fewshot_size": 25.0 |
|
}, |
|
"harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { |
|
"sample_size": 500, |
|
"truncated": 0, |
|
"non_truncated": 500, |
|
"padded": 0, |
|
"non_padded": 500, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 1178.136, |
|
"min_seq_length": 1164, |
|
"max_seq_length": 1215, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 25.0, |
|
"mean_effective_fewshot_size": 25.0 |
|
}, |
|
"harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { |
|
"sample_size": 500, |
|
"truncated": 0, |
|
"non_truncated": 500, |
|
"padded": 0, |
|
"non_padded": 500, |
|
"fewshots_truncated": 0, |
|
"mean_seq_length": 1291.334, |
|
"min_seq_length": 1277, |
|
"max_seq_length": 1322, |
|
"max_ctx_length": 2048, |
|
"max_gen_toks": 32, |
|
"mean_original_fewshots_size": 25.0, |
|
"mean_effective_fewshot_size": 25.0 |
|
} |
|
}, |
|
"summary_general": { |
|
"truncated": 2, |
|
"non_truncated": 11887, |
|
"padded": 0, |
|
"non_padded": 11889, |
|
"fewshots_truncated": 2 |
|
} |
|
} |