Dataset Preview
The full dataset viewer is not available (generation failed with error code UnexpectedError); only a preview of the rows is shown below.

Columns: config_general (dict), results (dict), config_tasks (dict), versions (dict), summary_tasks (dict), summary_general (dict)
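Each preview row below is one evaluation run, stored as the six dict columns above. A minimal sketch for inspecting a single record, assuming one row has been saved locally as `record.json` (a hypothetical path, not a file shipped with the dataset):

```python
# Minimal sketch (not an official loader): inspect one raw evaluation record.
# Assumes a single preview row was saved locally as "record.json" with the
# six top-level sections listed above.
import json

with open("record.json", encoding="utf-8") as f:
    record = json.load(f)

cfg = record["config_general"]
res = record["results"]

print(cfg["model_name"], cfg["eval_version"], "max_length =", cfg["max_length"])
print("all_grouped_average =", res["all_grouped_average"])
for task, score in sorted(res["all_grouped"].items()):
    print(f"  {task}: {score:.4f}")
```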
{ "start_date": "2024-02-09T08-03-26.811325", "start_time": 1707465807.3430393, "end_time": 1707476377.5033355, "total_evaluation_time_seconds": 10570.160296201706, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "a443ece4183f7cec2a6b318e808867bf156a87b6", "model_dtype": "bfloat16", "model_memory_footprint": 74921849856, "model_num_parameters": 34388917248, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:1", "batch_size": 2, "max_length": 4096, "max_ctx_length": 4064, "max_gen_toks": 32, "model_name": "01-ai/Yi-34B-200K", "job_id": 122, "model_id": "01-ai/Yi-34B-200K_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:18:19", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.6356464211312362, "all_grouped_npm": 0.4967380342980293, "all_grouped": { "enem_challenge": 0.7186843946815955, "bluex": 0.6634214186369958, "oab_exams": 0.571753986332574, "assin2_rte": 0.7858403678133732, "assin2_sts": 0.5583683246827316, "faquad_nli": 0.7800338409475465, "sparrow_pt": 0.37142261482383704 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.7186843946815955, "harness|bluex|bluex|None|3": 0.6634214186369958, "harness|oab_exams|oab_exams|None|3": 0.571753986332574, "harness|assin2_rte|assin2_rte|None|15": 0.7858403678133732, "harness|assin2_sts|assin2_sts|None|15": 0.5583683246827316, "harness|faquad_nli|faquad_nli|None|15": 0.7800338409475465, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.08900639474262209, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.5285560344827587, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.4786461875712873, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.3894818424986802 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.7186843946815955, "acc,exam_id__2023": 0.7555555555555555, "acc,exam_id__2009": 0.7391304347826086, "acc,exam_id__2013": 0.6944444444444444, "acc,exam_id__2011": 0.7863247863247863, "acc,exam_id__2014": 0.7431192660550459, "acc,exam_id__2016_2": 0.6747967479674797, "acc,exam_id__2022": 0.6691729323308271, "acc,exam_id__2017": 0.7413793103448276, "acc,exam_id__2016": 0.6859504132231405, "acc,exam_id__2015": 0.6974789915966386, "acc,exam_id__2012": 0.7327586206896551, "acc,exam_id__2010": 0.7094017094017094, "main_score": 0.7186843946815955 }, "harness|bluex|bluex|None|3": { "acc,all": 0.6634214186369958, "acc,exam_id__USP_2018": 0.6296296296296297, "acc,exam_id__USP_2024": 0.7804878048780488, "acc,exam_id__USP_2021": 0.5769230769230769, "acc,exam_id__UNICAMP_2021_1": 0.7608695652173914, "acc,exam_id__USP_2020": 0.5892857142857143, "acc,exam_id__USP_2022": 0.6938775510204082, "acc,exam_id__UNICAMP_2022": 0.717948717948718, "acc,exam_id__UNICAMP_2020": 0.6, "acc,exam_id__USP_2023": 0.7954545454545454, "acc,exam_id__USP_2019": 0.575, "acc,exam_id__UNICAMP_2023": 0.6744186046511628, "acc,exam_id__UNICAMP_2024": 0.7111111111111111, "acc,exam_id__UNICAMP_2021_2": 0.6666666666666666, "acc,exam_id__UNICAMP_2019": 0.64, "acc,exam_id__UNICAMP_2018": 0.6111111111111112, "main_score": 0.6634214186369958 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.571753986332574, "acc,exam_id__2010-01": 0.5058823529411764, "acc,exam_id__2017-24": 0.6375, "acc,exam_id__2016-21": 0.5125, "acc,exam_id__2016-19": 0.5897435897435898, "acc,exam_id__2013-12": 0.6125, "acc,exam_id__2015-17": 0.6410256410256411, "acc,exam_id__2012-09": 0.5194805194805194, "acc,exam_id__2014-14": 0.6625, "acc,exam_id__2013-10": 0.6125, "acc,exam_id__2012-06a": 0.6125, "acc,exam_id__2016-20a": 0.5, "acc,exam_id__2018-25": 0.6, "acc,exam_id__2011-04": 0.4375, "acc,exam_id__2011-05": 0.5375, "acc,exam_id__2017-22": 0.5625, "acc,exam_id__2014-13": 0.5125, "acc,exam_id__2012-08": 0.5125, "acc,exam_id__2013-11": 0.6375, "acc,exam_id__2011-03": 0.5555555555555556, "acc,exam_id__2012-07": 0.5375, "acc,exam_id__2015-16": 0.525, "acc,exam_id__2014-15": 0.6666666666666666, "acc,exam_id__2010-02": 0.55, "acc,exam_id__2016-20": 0.6125, "acc,exam_id__2017-23": 0.55, "acc,exam_id__2012-06": 0.6, "acc,exam_id__2015-18": 0.65, "main_score": 0.571753986332574 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.7858403678133732, "acc,all": 0.7945261437908496, 
"main_score": 0.7858403678133732 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.5583683246827316, "mse,all": 1.6300571895424838, "main_score": 0.5583683246827316 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.7800338409475465, "acc,all": 0.8384615384615385, "main_score": 0.7800338409475465 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.08900639474262209, "acc,all": 0.138, "main_score": 0.08900639474262209 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.5285560344827587, "acc,all": 0.664, "main_score": 0.5285560344827587 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.4786461875712873, "acc,all": 0.544, "main_score": 0.4786461875712873 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.3894818424986802, "acc,all": 0.408, "main_score": 0.3894818424986802 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 0, "non_truncated": 1429, "padded": 0, "non_padded": 1429, "fewshots_truncated": 0, "mean_seq_length": 1749.6696990902728, "min_seq_length": 1455, "max_seq_length": 2765, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1473.1738525730182, "min_seq_length": 1061, "max_seq_length": 2355, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1501.7266514806379, "min_seq_length": 1201, "max_seq_length": 2039, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1376.5265522875818, "min_seq_length": 1352, "max_seq_length": 1448, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1558.5265522875818, "min_seq_length": 1534, "max_seq_length": 1630, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1721.1292307692308, "min_seq_length": 1661, "max_seq_length": 1854, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1852.022, "min_seq_length": 1827, "max_seq_length": 1894, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1902.65, "min_seq_length": 1876, "max_seq_length": 1944, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1546.77, "min_seq_length": 1527, "max_seq_length": 1591, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1714.068, "min_seq_length": 1694, "max_seq_length": 1755, "max_ctx_length": 
4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 0, "non_truncated": 11889, "padded": 0, "non_padded": 11889, "fewshots_truncated": 0 }
{ "start_date": "2024-04-17T23-49-34.862700", "start_time": 1713397789.3955784, "end_time": 1713409575.0003965, "total_evaluation_time_seconds": 11785.604818105698, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "f2606e099a34dc448b83854629a7e5a3ed53a781", "model_dtype": "bfloat16", "model_memory_footprint": 74921849856, "model_num_parameters": 34388917248, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 2, "max_length": 2560, "max_ctx_length": 2528, "max_gen_toks": 32, "model_name": "01-ai/Yi-34B-200K", "job_id": 480, "model_id": "01-ai/Yi-34B-200K_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:18:19", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.6881385400726399, "all_grouped_npm": 0.523233175505688, "all_grouped": { "enem_challenge": 0.7172848145556333, "bluex": 0.6481223922114048, "oab_exams": 0.5517084282460136, "assin2_rte": 0.9097218456052794, "assin2_sts": 0.7390390977418284, "faquad_nli": 0.49676238738738737, "hatebr_offensive": 0.8117947554592124, "portuguese_hate_speech": 0.7007076712295253, "tweetsentbr": 0.6181054682174745 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.7172848145556333, "harness|bluex|bluex|None|3": 0.6481223922114048, "harness|oab_exams|oab_exams|None|3": 0.5517084282460136, "harness|assin2_rte|assin2_rte|None|15": 0.9097218456052794, "harness|assin2_sts|assin2_sts|None|15": 0.7390390977418284, "harness|faquad_nli|faquad_nli|None|15": 0.49676238738738737, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.8117947554592124, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.7007076712295253, "harness|tweetsentbr|tweetsentbr|None|25": 0.6181054682174745 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.7172848145556333, "acc,exam_id__2016": 0.6611570247933884, "acc,exam_id__2013": 0.6759259259259259, "acc,exam_id__2015": 0.7058823529411765, "acc,exam_id__2017": 0.6810344827586207, "acc,exam_id__2023": 0.762962962962963, "acc,exam_id__2011": 0.8034188034188035, "acc,exam_id__2010": 0.7350427350427351, "acc,exam_id__2014": 0.7431192660550459, "acc,exam_id__2012": 0.7413793103448276, "acc,exam_id__2016_2": 0.6991869918699187, "acc,exam_id__2022": 0.6842105263157895, "acc,exam_id__2009": 0.7130434782608696, "main_score": 0.7172848145556333 }, "harness|bluex|bluex|None|3": { "acc,all": 0.6481223922114048, "acc,exam_id__UNICAMP_2019": 0.66, "acc,exam_id__USP_2018": 0.5925925925925926, "acc,exam_id__USP_2020": 0.6428571428571429, "acc,exam_id__UNICAMP_2021_1": 0.5434782608695652, "acc,exam_id__USP_2024": 0.8292682926829268, "acc,exam_id__UNICAMP_2020": 0.5454545454545454, "acc,exam_id__USP_2019": 0.55, "acc,exam_id__UNICAMP_2022": 0.6923076923076923, "acc,exam_id__UNICAMP_2023": 0.7209302325581395, "acc,exam_id__USP_2022": 0.7142857142857143, "acc,exam_id__UNICAMP_2018": 0.5370370370370371, "acc,exam_id__USP_2021": 0.6538461538461539, "acc,exam_id__UNICAMP_2024": 0.7111111111111111, "acc,exam_id__UNICAMP_2021_2": 0.6470588235294118, "acc,exam_id__USP_2023": 0.75, "main_score": 0.6481223922114048 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.5517084282460136, "acc,exam_id__2015-17": 0.6923076923076923, "acc,exam_id__2012-07": 0.5375, "acc,exam_id__2012-08": 0.625, "acc,exam_id__2014-14": 0.575, "acc,exam_id__2011-03": 0.494949494949495, "acc,exam_id__2013-10": 0.6125, "acc,exam_id__2015-16": 0.5625, "acc,exam_id__2017-23": 0.5, "acc,exam_id__2013-12": 0.6125, "acc,exam_id__2016-20": 0.575, "acc,exam_id__2017-22": 0.525, "acc,exam_id__2016-19": 0.5384615384615384, "acc,exam_id__2010-02": 0.6, "acc,exam_id__2015-18": 0.6125, "acc,exam_id__2013-11": 0.525, "acc,exam_id__2016-21": 0.475, "acc,exam_id__2012-09": 0.5194805194805194, "acc,exam_id__2010-01": 0.47058823529411764, "acc,exam_id__2012-06a": 0.55, "acc,exam_id__2017-24": 0.6, "acc,exam_id__2014-13": 0.5, "acc,exam_id__2014-15": 0.6538461538461539, "acc,exam_id__2016-20a": 0.5375, "acc,exam_id__2018-25": 0.475, "acc,exam_id__2012-06": 0.575, "acc,exam_id__2011-05": 0.5, "acc,exam_id__2011-04": 0.4625, "main_score": 0.5517084282460136 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.9097218456052794, "acc,all": 0.9097222222222222, "main_score": 
0.9097218456052794 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.7390390977418284, "mse,all": 0.657062908496732, "main_score": 0.7390390977418284 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.49676238738738737, "acc,all": 0.796923076923077, "main_score": 0.49676238738738737 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.8117947554592124, "acc,all": 0.8171428571428572, "main_score": 0.8117947554592124 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.7007076712295253, "acc,all": 0.7297297297297297, "main_score": 0.7007076712295253 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.6181054682174745, "acc,all": 0.7024875621890547, "main_score": 0.6181054682174745 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 2, "mean_seq_length": 1773.6696990902728, "min_seq_length": 1479, "max_seq_length": 2789, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.998600419874038 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 3, "non_truncated": 716, "padded": 0, "non_padded": 719, "fewshots_truncated": 5, "mean_seq_length": 1887.1738525730182, "min_seq_length": 1475, "max_seq_length": 2769, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9930458970792766 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1523.7266514806379, "min_seq_length": 1223, "max_seq_length": 2061, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1501.5265522875818, "min_seq_length": 1477, "max_seq_length": 1573, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1720.5265522875818, "min_seq_length": 1696, "max_seq_length": 1792, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1760.1292307692308, "min_seq_length": 1700, "max_seq_length": 1893, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1417.9257142857143, "min_seq_length": 1390, "max_seq_length": 1696, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1945.7544065804934, "min_seq_length": 1908, "max_seq_length": 1981, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1763.844776119403, "min_seq_length": 1741, "max_seq_length": 1879, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 5, "non_truncated": 14145, "padded": 0, "non_padded": 14150, "fewshots_truncated": 7 }
{ "start_date": "2024-02-28T08-14-36.046639", "start_time": 1709108076.796536, "end_time": 1709130862.5257113, "total_evaluation_time_seconds": 22785.72917532921, "has_chat_template": true, "chat_type": "system_user_assistant", "n_gpus": 1, "accelerate_num_process": null, "model_sha": "099f03d1482e74cd1acb40d6c91c98824a594e12", "model_dtype": "bfloat16", "model_memory_footprint": 68912067584, "model_num_parameters": 34388917248, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 1, "max_length": 4096, "max_ctx_length": 4064, "max_gen_toks": 32, "model_name": "01-ai/Yi-34B-Chat", "job_id": 272, "model_id": "01-ai/Yi-34B-Chat_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "πŸ’¬ : chat models (RLHF, DPO, IFT, ...)", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-27T00:40:17", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.7076191361692378, "all_grouped_npm": 0.5577893098264872, "all_grouped": { "enem_challenge": 0.7123862841147656, "bluex": 0.6328233657858137, "oab_exams": 0.5202733485193621, "assin2_rte": 0.924014535978148, "assin2_sts": 0.7419038025688336, "faquad_nli": 0.7157210401891253, "hatebr_offensive": 0.7198401711140126, "portuguese_hate_speech": 0.7135410538975384, "tweetsentbr": 0.6880686233555414 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.7123862841147656, "harness|bluex|bluex|None|3": 0.6328233657858137, "harness|oab_exams|oab_exams|None|3": 0.5202733485193621, "harness|assin2_rte|assin2_rte|None|15": 0.924014535978148, "harness|assin2_sts|assin2_sts|None|15": 0.7419038025688336, "harness|faquad_nli|faquad_nli|None|15": 0.7157210401891253, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.7198401711140126, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.7135410538975384, "harness|tweetsentbr|tweetsentbr|None|25": 0.6880686233555414 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.7123862841147656, "acc,exam_id__2016_2": 0.6910569105691057, "acc,exam_id__2023": 0.7481481481481481, "acc,exam_id__2014": 0.7522935779816514, "acc,exam_id__2017": 0.7241379310344828, "acc,exam_id__2009": 0.7391304347826086, "acc,exam_id__2015": 0.7142857142857143, "acc,exam_id__2016": 0.6528925619834711, "acc,exam_id__2022": 0.6616541353383458, "acc,exam_id__2012": 0.6637931034482759, "acc,exam_id__2013": 0.7129629629629629, "acc,exam_id__2011": 0.8034188034188035, "acc,exam_id__2010": 0.6923076923076923, "main_score": 0.7123862841147656 }, "harness|bluex|bluex|None|3": { "acc,all": 0.6328233657858137, "acc,exam_id__USP_2023": 0.7954545454545454, "acc,exam_id__UNICAMP_2023": 0.7209302325581395, "acc,exam_id__UNICAMP_2024": 0.6666666666666666, "acc,exam_id__USP_2021": 0.6538461538461539, "acc,exam_id__UNICAMP_2021_2": 0.6470588235294118, "acc,exam_id__UNICAMP_2019": 0.62, "acc,exam_id__UNICAMP_2022": 0.7435897435897436, "acc,exam_id__UNICAMP_2018": 0.5370370370370371, "acc,exam_id__UNICAMP_2020": 0.5818181818181818, "acc,exam_id__USP_2020": 0.625, "acc,exam_id__USP_2018": 0.48148148148148145, "acc,exam_id__USP_2019": 0.5, "acc,exam_id__UNICAMP_2021_1": 0.5652173913043478, "acc,exam_id__USP_2024": 0.7804878048780488, "acc,exam_id__USP_2022": 0.6530612244897959, "main_score": 0.6328233657858137 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.5202733485193621, "acc,exam_id__2012-08": 0.525, "acc,exam_id__2015-17": 0.5897435897435898, "acc,exam_id__2012-09": 0.42857142857142855, "acc,exam_id__2013-11": 0.55, "acc,exam_id__2014-13": 0.4625, "acc,exam_id__2012-06": 0.525, "acc,exam_id__2017-24": 0.5125, "acc,exam_id__2010-01": 0.49411764705882355, "acc,exam_id__2016-20a": 0.425, "acc,exam_id__2012-06a": 0.5375, "acc,exam_id__2017-23": 0.4375, "acc,exam_id__2014-14": 0.575, "acc,exam_id__2018-25": 0.5, "acc,exam_id__2013-10": 0.5375, "acc,exam_id__2011-05": 0.5, "acc,exam_id__2017-22": 0.6, "acc,exam_id__2011-03": 0.40404040404040403, "acc,exam_id__2016-21": 0.475, "acc,exam_id__2015-16": 0.5, "acc,exam_id__2011-04": 0.4375, "acc,exam_id__2016-20": 0.6, "acc,exam_id__2014-15": 0.6153846153846154, "acc,exam_id__2012-07": 0.5125, "acc,exam_id__2016-19": 0.5256410256410257, "acc,exam_id__2015-18": 0.525, "acc,exam_id__2013-12": 0.6625, "acc,exam_id__2010-02": 0.6, "main_score": 0.5202733485193621 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.924014535978148, "acc,all": 0.9240196078431373, "main_score": 
0.924014535978148 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.7419038025688336, "mse,all": 0.6120996732026143, "main_score": 0.7419038025688336 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.7157210401891253, "acc,all": 0.7723076923076924, "main_score": 0.7157210401891253 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.7198401711140126, "acc,all": 0.7385714285714285, "main_score": 0.7198401711140126 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.7135410538975384, "acc,all": 0.7579318448883666, "main_score": 0.7135410538975384 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.6880686233555414, "acc,all": 0.7323383084577114, "main_score": 0.6880686233555414 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 0, "non_truncated": 1429, "padded": 0, "non_padded": 1429, "fewshots_truncated": 0, "mean_seq_length": 1816.6696990902728, "min_seq_length": 1522, "max_seq_length": 2832, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1930.1738525730182, "min_seq_length": 1518, "max_seq_length": 2812, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1566.7266514806379, "min_seq_length": 1266, "max_seq_length": 2104, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1664.5265522875818, "min_seq_length": 1640, "max_seq_length": 1736, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1868.5265522875818, "min_seq_length": 1844, "max_seq_length": 1940, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1923.1292307692308, "min_seq_length": 1863, "max_seq_length": 2056, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1680.9257142857143, "min_seq_length": 1653, "max_seq_length": 1959, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 2208.7544065804937, "min_seq_length": 2171, "max_seq_length": 2244, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1894.844776119403, "min_seq_length": 1872, "max_seq_length": 2010, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 0, "non_truncated": 14150, "padded": 0, "non_padded": 14150, "fewshots_truncated": 0 }
{ "start_date": "2024-02-07T03-32-43.048071", "start_time": 1707276763.7733982, "end_time": 1707287202.8406198, "total_evaluation_time_seconds": 10439.06722164154, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "5def4b3897b0a492a6411e2d5c62bb4d31bed5f8", "model_dtype": "bfloat16", "model_memory_footprint": 68903678976, "model_num_parameters": 34388917248, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:1", "batch_size": 2, "max_length": 4096, "max_ctx_length": 4064, "max_gen_toks": 32, "model_name": "01-ai/Yi-34B", "job_id": 33, "model_id": "01-ai/Yi-34B_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:05:39", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.6416817888200871, "all_grouped_npm": 0.4958265468665359, "all_grouped": { "enem_challenge": 0.7214835549335199, "bluex": 0.6842837273991655, "oab_exams": 0.566742596810934, "assin2_rte": 0.7095337812960236, "assin2_sts": 0.6212032386293976, "faquad_nli": 0.7969022005981341, "sparrow_pt": 0.3916234220734354 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.7214835549335199, "harness|bluex|bluex|None|3": 0.6842837273991655, "harness|oab_exams|oab_exams|None|3": 0.566742596810934, "harness|assin2_rte|assin2_rte|None|15": 0.7095337812960236, "harness|assin2_sts|assin2_sts|None|15": 0.6212032386293976, "harness|faquad_nli|faquad_nli|None|15": 0.7969022005981341, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.09491912944063828, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.6137474789894183, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.5216710358271157, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.3361560440365694 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.7214835549335199, "acc,exam_id__2016_2": 0.7073170731707317, "acc,exam_id__2016": 0.6694214876033058, "acc,exam_id__2010": 0.7094017094017094, "acc,exam_id__2014": 0.7431192660550459, "acc,exam_id__2023": 0.7555555555555555, "acc,exam_id__2017": 0.7155172413793104, "acc,exam_id__2015": 0.6890756302521008, "acc,exam_id__2012": 0.7068965517241379, "acc,exam_id__2022": 0.6917293233082706, "acc,exam_id__2013": 0.7129629629629629, "acc,exam_id__2009": 0.7217391304347827, "acc,exam_id__2011": 0.8376068376068376, "main_score": 0.7214835549335199 }, "harness|bluex|bluex|None|3": { "acc,all": 0.6842837273991655, "acc,exam_id__USP_2019": 0.6, "acc,exam_id__UNICAMP_2018": 0.6296296296296297, "acc,exam_id__UNICAMP_2020": 0.6727272727272727, "acc,exam_id__UNICAMP_2023": 0.7441860465116279, "acc,exam_id__UNICAMP_2024": 0.6666666666666666, "acc,exam_id__UNICAMP_2019": 0.7, "acc,exam_id__UNICAMP_2021_2": 0.6862745098039216, "acc,exam_id__USP_2022": 0.6530612244897959, "acc,exam_id__USP_2020": 0.6607142857142857, "acc,exam_id__USP_2018": 0.6851851851851852, "acc,exam_id__USP_2021": 0.6153846153846154, "acc,exam_id__USP_2023": 0.75, "acc,exam_id__USP_2024": 0.8292682926829268, "acc,exam_id__UNICAMP_2021_1": 0.6956521739130435, "acc,exam_id__UNICAMP_2022": 0.717948717948718, "main_score": 0.6842837273991655 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.566742596810934, "acc,exam_id__2014-13": 0.525, "acc,exam_id__2012-07": 0.5875, "acc,exam_id__2012-09": 0.4675324675324675, "acc,exam_id__2011-03": 0.5151515151515151, "acc,exam_id__2012-08": 0.5375, "acc,exam_id__2015-17": 0.6410256410256411, "acc,exam_id__2014-15": 0.6410256410256411, "acc,exam_id__2017-24": 0.6125, "acc,exam_id__2015-16": 0.525, "acc,exam_id__2017-23": 0.5375, "acc,exam_id__2011-04": 0.4125, "acc,exam_id__2010-02": 0.67, "acc,exam_id__2016-19": 0.5641025641025641, "acc,exam_id__2012-06": 0.55, "acc,exam_id__2012-06a": 0.5625, "acc,exam_id__2013-12": 0.625, "acc,exam_id__2017-22": 0.5375, "acc,exam_id__2010-01": 0.5411764705882353, "acc,exam_id__2013-10": 0.575, "acc,exam_id__2014-14": 0.65, "acc,exam_id__2018-25": 0.55, "acc,exam_id__2016-20a": 0.4125, "acc,exam_id__2015-18": 0.6375, "acc,exam_id__2011-05": 0.6, "acc,exam_id__2013-11": 0.6125, "acc,exam_id__2016-20": 0.6125, "acc,exam_id__2016-21": 0.5875, "main_score": 0.566742596810934 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.7095337812960236, "acc,all": 0.7303921568627451, 
"main_score": 0.7095337812960236 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.6212032386293976, "mse,all": 1.4760457516339869, "main_score": 0.6212032386293976 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.7969022005981341, "acc,all": 0.8707692307692307, "main_score": 0.7969022005981341 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.09491912944063828, "acc,all": 0.156, "main_score": 0.09491912944063828 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.6137474789894183, "acc,all": 0.702, "main_score": 0.6137474789894183 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.5216710358271157, "acc,all": 0.636, "main_score": 0.5216710358271157 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.3361560440365694, "acc,all": 0.39, "main_score": 0.3361560440365694 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 0, "non_truncated": 1429, "padded": 0, "non_padded": 1429, "fewshots_truncated": 0, "mean_seq_length": 1749.6696990902728, "min_seq_length": 1455, "max_seq_length": 2765, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1473.1738525730182, "min_seq_length": 1061, "max_seq_length": 2355, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1501.7266514806379, "min_seq_length": 1201, "max_seq_length": 2039, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1376.5265522875818, "min_seq_length": 1352, "max_seq_length": 1448, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1558.5265522875818, "min_seq_length": 1534, "max_seq_length": 1630, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1721.1292307692308, "min_seq_length": 1661, "max_seq_length": 1854, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1852.022, "min_seq_length": 1827, "max_seq_length": 1894, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1902.65, "min_seq_length": 1876, "max_seq_length": 1944, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1546.77, "min_seq_length": 1527, "max_seq_length": 1591, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1714.068, "min_seq_length": 1694, "max_seq_length": 1755, "max_ctx_length": 
4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 0, "non_truncated": 11889, "padded": 0, "non_padded": 11889, "fewshots_truncated": 0 }
{ "start_date": "2024-04-13T15-53-49.411062", "start_time": 1713023631.9359422, "end_time": 1713036538.0450711, "total_evaluation_time_seconds": 12906.109128952026, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "629dd3c6987bc668e50fde5dc8f6473364b652d4", "model_dtype": "bfloat16", "model_memory_footprint": 68903678976, "model_num_parameters": 34388917248, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 4, "max_length": 2560, "max_ctx_length": 2528, "max_gen_toks": 32, "model_name": "01-ai/Yi-34B", "job_id": 440, "model_id": "01-ai/Yi-34B_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:05:39", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.7305963183589452, "all_grouped_npm": 0.5919780249153634, "all_grouped": { "enem_challenge": 0.7207837648705389, "bluex": 0.6648122392211405, "oab_exams": 0.5599088838268793, "assin2_rte": 0.917882167398896, "assin2_sts": 0.76681855136608, "faquad_nli": 0.7798334442926054, "hatebr_offensive": 0.8107834570679608, "portuguese_hate_speech": 0.6224786612758311, "tweetsentbr": 0.7320656959105744 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.7207837648705389, "harness|bluex|bluex|None|3": 0.6648122392211405, "harness|oab_exams|oab_exams|None|3": 0.5599088838268793, "harness|assin2_rte|assin2_rte|None|15": 0.917882167398896, "harness|assin2_sts|assin2_sts|None|15": 0.76681855136608, "harness|faquad_nli|faquad_nli|None|15": 0.7798334442926054, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.8107834570679608, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.6224786612758311, "harness|tweetsentbr|tweetsentbr|None|25": 0.7320656959105744 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.7207837648705389, "acc,exam_id__2009": 0.7217391304347827, "acc,exam_id__2011": 0.8376068376068376, "acc,exam_id__2010": 0.7094017094017094, "acc,exam_id__2014": 0.7339449541284404, "acc,exam_id__2016": 0.6694214876033058, "acc,exam_id__2016_2": 0.6910569105691057, "acc,exam_id__2013": 0.7037037037037037, "acc,exam_id__2023": 0.7481481481481481, "acc,exam_id__2022": 0.6917293233082706, "acc,exam_id__2012": 0.7155172413793104, "acc,exam_id__2017": 0.7327586206896551, "acc,exam_id__2015": 0.6974789915966386, "main_score": 0.7207837648705389 }, "harness|bluex|bluex|None|3": { "acc,all": 0.6648122392211405, "acc,exam_id__UNICAMP_2018": 0.6111111111111112, "acc,exam_id__USP_2019": 0.575, "acc,exam_id__UNICAMP_2021_2": 0.6274509803921569, "acc,exam_id__UNICAMP_2022": 0.7435897435897436, "acc,exam_id__USP_2021": 0.6538461538461539, "acc,exam_id__UNICAMP_2024": 0.6666666666666666, "acc,exam_id__USP_2022": 0.6326530612244898, "acc,exam_id__UNICAMP_2020": 0.6545454545454545, "acc,exam_id__USP_2024": 0.8292682926829268, "acc,exam_id__USP_2018": 0.6111111111111112, "acc,exam_id__USP_2023": 0.7272727272727273, "acc,exam_id__UNICAMP_2023": 0.6744186046511628, "acc,exam_id__UNICAMP_2021_1": 0.6739130434782609, "acc,exam_id__UNICAMP_2019": 0.66, "acc,exam_id__USP_2020": 0.6785714285714286, "main_score": 0.6648122392211405 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.5599088838268793, "acc,exam_id__2016-21": 0.6, "acc,exam_id__2011-05": 0.5875, "acc,exam_id__2012-06": 0.525, "acc,exam_id__2011-03": 0.48484848484848486, "acc,exam_id__2016-20a": 0.4125, "acc,exam_id__2017-22": 0.5125, "acc,exam_id__2014-14": 0.625, "acc,exam_id__2012-09": 0.4675324675324675, "acc,exam_id__2014-13": 0.525, "acc,exam_id__2018-25": 0.5625, "acc,exam_id__2014-15": 0.6666666666666666, "acc,exam_id__2012-08": 0.5125, "acc,exam_id__2016-19": 0.5512820512820513, "acc,exam_id__2017-23": 0.55, "acc,exam_id__2010-01": 0.5411764705882353, "acc,exam_id__2016-20": 0.6125, "acc,exam_id__2013-12": 0.65, "acc,exam_id__2011-04": 0.4, "acc,exam_id__2015-17": 0.6923076923076923, "acc,exam_id__2013-11": 0.6, "acc,exam_id__2012-06a": 0.55, "acc,exam_id__2010-02": 0.63, "acc,exam_id__2012-07": 0.5375, "acc,exam_id__2015-18": 0.6125, "acc,exam_id__2015-16": 0.525, "acc,exam_id__2017-24": 0.5875, "acc,exam_id__2013-10": 0.6, "main_score": 0.5599088838268793 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.917882167398896, "acc,all": 0.9178921568627451, "main_score": 
0.917882167398896 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.76681855136608, "mse,all": 0.5763562091503268, "main_score": 0.76681855136608 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.7798334442926054, "acc,all": 0.8661538461538462, "main_score": 0.7798334442926054 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.8107834570679608, "acc,all": 0.8157142857142857, "main_score": 0.8107834570679608 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.6224786612758311, "acc,all": 0.6286721504112809, "main_score": 0.6224786612758311 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.7320656959105744, "acc,all": 0.7522388059701492, "main_score": 0.7320656959105744 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 2, "mean_seq_length": 1773.6696990902728, "min_seq_length": 1479, "max_seq_length": 2789, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.998600419874038 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 3, "non_truncated": 716, "padded": 0, "non_padded": 719, "fewshots_truncated": 5, "mean_seq_length": 1887.1738525730182, "min_seq_length": 1475, "max_seq_length": 2769, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9930458970792766 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1523.7266514806379, "min_seq_length": 1223, "max_seq_length": 2061, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1501.5265522875818, "min_seq_length": 1477, "max_seq_length": 1573, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1720.5265522875818, "min_seq_length": 1696, "max_seq_length": 1792, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1760.1292307692308, "min_seq_length": 1700, "max_seq_length": 1893, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1417.9257142857143, "min_seq_length": 1390, "max_seq_length": 1696, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1945.7544065804934, "min_seq_length": 1908, "max_seq_length": 1981, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1614.844776119403, "min_seq_length": 1592, "max_seq_length": 1730, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 5, "non_truncated": 14145, "padded": 0, "non_padded": 14150, "fewshots_truncated": 7 }
{ "start_date": "2024-02-09T07-10-30.923872", "start_time": 1707462631.48241, "end_time": 1707465739.902993, "total_evaluation_time_seconds": 3108.42058300972, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "525391aeca61e8b7701fd83da1d9328273fd3511", "model_dtype": "bfloat16", "model_memory_footprint": 15398879232, "model_num_parameters": 6061035520, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:1", "batch_size": 16, "max_length": 4096, "max_ctx_length": 4064, "max_gen_toks": 32, "model_name": "01-ai/Yi-6B-200K", "job_id": 121, "model_id": "01-ai/Yi-6B-200K_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:18:12", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.47606572503004063, "all_grouped_npm": 0.2713265790562599, "all_grouped": { "enem_challenge": 0.5703289013296011, "bluex": 0.49791376912378305, "oab_exams": 0.4419134396355353, "assin2_rte": 0.7574463542222696, "assin2_sts": 0.3059276997249324, "faquad_nli": 0.4471267110923455, "sparrow_pt": 0.31180320008181783 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.5703289013296011, "harness|bluex|bluex|None|3": 0.49791376912378305, "harness|oab_exams|oab_exams|None|3": 0.4419134396355353, "harness|assin2_rte|assin2_rte|None|15": 0.7574463542222696, "harness|assin2_sts|assin2_sts|None|15": 0.3059276997249324, "harness|faquad_nli|faquad_nli|None|15": 0.4471267110923455, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.03920512763931725, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.3932038834951456, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.48971642449181224, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.32508736470099614 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.5703289013296011, "acc,exam_id__2023": 0.6296296296296297, "acc,exam_id__2009": 0.5304347826086957, "acc,exam_id__2013": 0.5277777777777778, "acc,exam_id__2011": 0.6495726495726496, "acc,exam_id__2014": 0.5688073394495413, "acc,exam_id__2016_2": 0.5528455284552846, "acc,exam_id__2022": 0.5714285714285714, "acc,exam_id__2017": 0.5948275862068966, "acc,exam_id__2016": 0.5619834710743802, "acc,exam_id__2015": 0.5630252100840336, "acc,exam_id__2012": 0.5431034482758621, "acc,exam_id__2010": 0.5384615384615384, "main_score": 0.5703289013296011 }, "harness|bluex|bluex|None|3": { "acc,all": 0.49791376912378305, "acc,exam_id__USP_2018": 0.4444444444444444, "acc,exam_id__USP_2024": 0.5853658536585366, "acc,exam_id__USP_2021": 0.5192307692307693, "acc,exam_id__UNICAMP_2021_1": 0.5, "acc,exam_id__USP_2020": 0.5714285714285714, "acc,exam_id__USP_2022": 0.4489795918367347, "acc,exam_id__UNICAMP_2022": 0.48717948717948717, "acc,exam_id__UNICAMP_2020": 0.5454545454545454, "acc,exam_id__USP_2023": 0.6363636363636364, "acc,exam_id__USP_2019": 0.375, "acc,exam_id__UNICAMP_2023": 0.5581395348837209, "acc,exam_id__UNICAMP_2024": 0.5555555555555556, "acc,exam_id__UNICAMP_2021_2": 0.39215686274509803, "acc,exam_id__UNICAMP_2019": 0.5, "acc,exam_id__UNICAMP_2018": 0.37037037037037035, "main_score": 0.49791376912378305 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.4419134396355353, "acc,exam_id__2010-01": 0.32941176470588235, "acc,exam_id__2017-24": 0.4, "acc,exam_id__2016-21": 0.4875, "acc,exam_id__2016-19": 0.4358974358974359, "acc,exam_id__2013-12": 0.5, "acc,exam_id__2015-17": 0.46153846153846156, "acc,exam_id__2012-09": 0.33766233766233766, "acc,exam_id__2014-14": 0.4875, "acc,exam_id__2013-10": 0.45, "acc,exam_id__2012-06a": 0.5125, "acc,exam_id__2016-20a": 0.4, "acc,exam_id__2018-25": 0.5, "acc,exam_id__2011-04": 0.4125, "acc,exam_id__2011-05": 0.4375, "acc,exam_id__2017-22": 0.375, "acc,exam_id__2014-13": 0.425, "acc,exam_id__2012-08": 0.45, "acc,exam_id__2013-11": 0.45, "acc,exam_id__2011-03": 0.3939393939393939, "acc,exam_id__2012-07": 0.45, "acc,exam_id__2015-16": 0.475, "acc,exam_id__2014-15": 0.5128205128205128, "acc,exam_id__2010-02": 0.44, "acc,exam_id__2016-20": 0.475, "acc,exam_id__2017-23": 0.475, "acc,exam_id__2012-06": 0.4125, "acc,exam_id__2015-18": 0.4625, "main_score": 0.4419134396355353 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.7574463542222696, "acc,all": 0.7679738562091504, 
"main_score": 0.7574463542222696 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.3059276997249324, "mse,all": 1.9058129084967315, "main_score": 0.3059276997249324 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.4471267110923455, "acc,all": 0.7861538461538462, "main_score": 0.4471267110923455 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.03920512763931725, "acc,all": 0.102, "main_score": 0.03920512763931725 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.3932038834951456, "acc,all": 0.648, "main_score": 0.3932038834951456 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.48971642449181224, "acc,all": 0.648, "main_score": 0.48971642449181224 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.32508736470099614, "acc,all": 0.388, "main_score": 0.32508736470099614 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 0, "non_truncated": 1429, "padded": 0, "non_padded": 1429, "fewshots_truncated": 0, "mean_seq_length": 1749.6696990902728, "min_seq_length": 1455, "max_seq_length": 2765, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1473.1738525730182, "min_seq_length": 1061, "max_seq_length": 2355, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1501.7266514806379, "min_seq_length": 1201, "max_seq_length": 2039, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1376.5265522875818, "min_seq_length": 1352, "max_seq_length": 1448, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1558.5265522875818, "min_seq_length": 1534, "max_seq_length": 1630, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1721.1292307692308, "min_seq_length": 1661, "max_seq_length": 1854, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1852.022, "min_seq_length": 1827, "max_seq_length": 1894, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1902.65, "min_seq_length": 1876, "max_seq_length": 1944, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1546.77, "min_seq_length": 1527, "max_seq_length": 1591, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1714.068, "min_seq_length": 1694, "max_seq_length": 1755, "max_ctx_length": 
4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 0, "non_truncated": 11889, "padded": 0, "non_padded": 11889, "fewshots_truncated": 0 }
{ "start_date": "2024-04-16T17-07-31.622853", "start_time": 1713287252.4200323, "end_time": 1713321164.768971, "total_evaluation_time_seconds": 33912.34893870354, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "cfccc5f336c2de656003b430e16bc2086a272384", "model_dtype": "bfloat16", "model_memory_footprint": 15398879232, "model_num_parameters": 6061035520, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 4, "max_length": 2560, "max_ctx_length": 2528, "max_gen_toks": 32, "model_name": "01-ai/Yi-6B-200K", "job_id": 469, "model_id": "01-ai/Yi-6B-200K_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:18:12", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.5008312063529308, "all_grouped_npm": 0.21447587948524316, "all_grouped": { "enem_challenge": 0.5423372988103569, "bluex": 0.4673157162726008, "oab_exams": 0.4328018223234624, "assin2_rte": 0.40523403335417163, "assin2_sts": 0.4964641013268987, "faquad_nli": 0.4396551724137931, "hatebr_offensive": 0.4892942520605069, "portuguese_hate_speech": 0.6053769911504425, "tweetsentbr": 0.6290014694641435 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.5423372988103569, "harness|bluex|bluex|None|3": 0.4673157162726008, "harness|oab_exams|oab_exams|None|3": 0.4328018223234624, "harness|assin2_rte|assin2_rte|None|15": 0.40523403335417163, "harness|assin2_sts|assin2_sts|None|15": 0.4964641013268987, "harness|faquad_nli|faquad_nli|None|15": 0.4396551724137931, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.4892942520605069, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.6053769911504425, "harness|tweetsentbr|tweetsentbr|None|25": 0.6290014694641435 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.5423372988103569, "acc,exam_id__2013": 0.5277777777777778, "acc,exam_id__2022": 0.5413533834586466, "acc,exam_id__2023": 0.5555555555555556, "acc,exam_id__2016_2": 0.4796747967479675, "acc,exam_id__2010": 0.5128205128205128, "acc,exam_id__2014": 0.5137614678899083, "acc,exam_id__2009": 0.4956521739130435, "acc,exam_id__2015": 0.5294117647058824, "acc,exam_id__2016": 0.5785123966942148, "acc,exam_id__2012": 0.5775862068965517, "acc,exam_id__2011": 0.5897435897435898, "acc,exam_id__2017": 0.603448275862069, "main_score": 0.5423372988103569 }, "harness|bluex|bluex|None|3": { "acc,all": 0.4673157162726008, "acc,exam_id__UNICAMP_2021_2": 0.4117647058823529, "acc,exam_id__USP_2022": 0.40816326530612246, "acc,exam_id__USP_2023": 0.5681818181818182, "acc,exam_id__UNICAMP_2022": 0.46153846153846156, "acc,exam_id__UNICAMP_2023": 0.4883720930232558, "acc,exam_id__USP_2020": 0.4642857142857143, "acc,exam_id__USP_2024": 0.4878048780487805, "acc,exam_id__UNICAMP_2021_1": 0.5, "acc,exam_id__UNICAMP_2024": 0.6, "acc,exam_id__UNICAMP_2020": 0.4909090909090909, "acc,exam_id__USP_2019": 0.425, "acc,exam_id__USP_2018": 0.37037037037037035, "acc,exam_id__UNICAMP_2019": 0.48, "acc,exam_id__USP_2021": 0.5, "acc,exam_id__UNICAMP_2018": 0.3888888888888889, "main_score": 0.4673157162726008 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.4328018223234624, "acc,exam_id__2014-15": 0.48717948717948717, "acc,exam_id__2017-24": 0.375, "acc,exam_id__2012-06a": 0.4875, "acc,exam_id__2011-03": 0.42424242424242425, "acc,exam_id__2016-20a": 0.3375, "acc,exam_id__2012-06": 0.375, "acc,exam_id__2010-02": 0.41, "acc,exam_id__2015-16": 0.4, "acc,exam_id__2010-01": 0.3764705882352941, "acc,exam_id__2015-18": 0.5125, "acc,exam_id__2016-19": 0.44871794871794873, "acc,exam_id__2012-08": 0.4875, "acc,exam_id__2017-22": 0.475, "acc,exam_id__2018-25": 0.375, "acc,exam_id__2012-09": 0.3246753246753247, "acc,exam_id__2013-10": 0.4, "acc,exam_id__2011-04": 0.3375, "acc,exam_id__2015-17": 0.5256410256410257, "acc,exam_id__2011-05": 0.3875, "acc,exam_id__2016-21": 0.45, "acc,exam_id__2016-20": 0.4875, "acc,exam_id__2013-11": 0.425, "acc,exam_id__2012-07": 0.475, "acc,exam_id__2014-13": 0.475, "acc,exam_id__2017-23": 0.475, "acc,exam_id__2013-12": 0.5, "acc,exam_id__2014-14": 0.4625, "main_score": 0.4328018223234624 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.40523403335417163, "acc,all": 0.5330882352941176, "main_score": 0.40523403335417163 }, 
"harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.4964641013268987, "mse,all": 1.4027859477124183, "main_score": 0.4964641013268987 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.4396551724137931, "acc,all": 0.7846153846153846, "main_score": 0.4396551724137931 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.4892942520605069, "acc,all": 0.5778571428571428, "main_score": 0.4892942520605069 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.6053769911504425, "acc,all": 0.6921269095182139, "main_score": 0.6053769911504425 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.6290014694641435, "acc,all": 0.6646766169154229, "main_score": 0.6290014694641435 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 2, "mean_seq_length": 1773.6696990902728, "min_seq_length": 1479, "max_seq_length": 2789, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.998600419874038 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 3, "non_truncated": 716, "padded": 0, "non_padded": 719, "fewshots_truncated": 5, "mean_seq_length": 1887.1738525730182, "min_seq_length": 1475, "max_seq_length": 2769, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9930458970792766 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1523.7266514806379, "min_seq_length": 1223, "max_seq_length": 2061, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1501.5265522875818, "min_seq_length": 1477, "max_seq_length": 1573, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1720.5265522875818, "min_seq_length": 1696, "max_seq_length": 1792, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1760.1292307692308, "min_seq_length": 1700, "max_seq_length": 1893, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1417.9257142857143, "min_seq_length": 1390, "max_seq_length": 1696, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1945.7544065804934, "min_seq_length": 1908, "max_seq_length": 1981, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1763.844776119403, "min_seq_length": 1741, "max_seq_length": 1879, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 5, "non_truncated": 14145, "padded": 0, "non_padded": 14150, "fewshots_truncated": 7 }
{ "start_date": "2024-02-28T14-35-07.615539", "start_time": 1709130908.5578952, "end_time": 1709137528.6014702, "total_evaluation_time_seconds": 6620.043575048447, "has_chat_template": true, "chat_type": "system_user_assistant", "n_gpus": 1, "accelerate_num_process": null, "model_sha": "f14752bd87e64d8d614d4f5bec6660b3e95ae528", "model_dtype": "bfloat16", "model_memory_footprint": 12256296960, "model_num_parameters": 6061035520, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 8, "max_length": 4096, "max_ctx_length": 4064, "max_gen_toks": 32, "model_name": "01-ai/Yi-6B-Chat", "job_id": 273, "model_id": "01-ai/Yi-6B-Chat_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "πŸ’¬ : chat models (RLHF, DPO, IFT, ...)", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-27T00:40:39", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.600468220733431, "all_grouped_npm": 0.40260964945878, "all_grouped": { "enem_challenge": 0.5570328901329601, "bluex": 0.5006954102920723, "oab_exams": 0.4118451025056948, "assin2_rte": 0.7948490568935549, "assin2_sts": 0.5684271643349206, "faquad_nli": 0.637960088691796, "hatebr_offensive": 0.775686136523575, "portuguese_hate_speech": 0.5712377041472934, "tweetsentbr": 0.5864804330790114 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.5570328901329601, "harness|bluex|bluex|None|3": 0.5006954102920723, "harness|oab_exams|oab_exams|None|3": 0.4118451025056948, "harness|assin2_rte|assin2_rte|None|15": 0.7948490568935549, "harness|assin2_sts|assin2_sts|None|15": 0.5684271643349206, "harness|faquad_nli|faquad_nli|None|15": 0.637960088691796, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.775686136523575, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.5712377041472934, "harness|tweetsentbr|tweetsentbr|None|25": 0.5864804330790114 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.5570328901329601, "acc,exam_id__2016_2": 0.4959349593495935, "acc,exam_id__2023": 0.5777777777777777, "acc,exam_id__2014": 0.4954128440366973, "acc,exam_id__2017": 0.603448275862069, "acc,exam_id__2009": 0.5217391304347826, "acc,exam_id__2015": 0.5966386554621849, "acc,exam_id__2016": 0.5371900826446281, "acc,exam_id__2022": 0.5639097744360902, "acc,exam_id__2012": 0.5517241379310345, "acc,exam_id__2013": 0.5833333333333334, "acc,exam_id__2011": 0.6068376068376068, "acc,exam_id__2010": 0.5470085470085471, "main_score": 0.5570328901329601 }, "harness|bluex|bluex|None|3": { "acc,all": 0.5006954102920723, "acc,exam_id__USP_2023": 0.6136363636363636, "acc,exam_id__UNICAMP_2023": 0.4883720930232558, "acc,exam_id__UNICAMP_2024": 0.5777777777777777, "acc,exam_id__USP_2021": 0.5576923076923077, "acc,exam_id__UNICAMP_2021_2": 0.49019607843137253, "acc,exam_id__UNICAMP_2019": 0.46, "acc,exam_id__UNICAMP_2022": 0.48717948717948717, "acc,exam_id__UNICAMP_2018": 0.35185185185185186, "acc,exam_id__UNICAMP_2020": 0.509090909090909, "acc,exam_id__USP_2020": 0.5357142857142857, "acc,exam_id__USP_2018": 0.4074074074074074, "acc,exam_id__USP_2019": 0.4, "acc,exam_id__UNICAMP_2021_1": 0.5434782608695652, "acc,exam_id__USP_2024": 0.6341463414634146, "acc,exam_id__USP_2022": 0.4897959183673469, "main_score": 0.5006954102920723 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.4118451025056948, "acc,exam_id__2012-08": 0.45, "acc,exam_id__2015-17": 0.5, "acc,exam_id__2012-09": 0.4155844155844156, "acc,exam_id__2013-11": 0.4125, "acc,exam_id__2014-13": 0.4125, "acc,exam_id__2012-06": 0.4, "acc,exam_id__2017-24": 0.4125, "acc,exam_id__2010-01": 0.29411764705882354, "acc,exam_id__2016-20a": 0.3375, "acc,exam_id__2012-06a": 0.4875, "acc,exam_id__2017-23": 0.4875, "acc,exam_id__2014-14": 0.475, "acc,exam_id__2018-25": 0.4375, "acc,exam_id__2013-10": 0.3375, "acc,exam_id__2011-05": 0.375, "acc,exam_id__2017-22": 0.4375, "acc,exam_id__2011-03": 0.29292929292929293, "acc,exam_id__2016-21": 0.3625, "acc,exam_id__2015-16": 0.3875, "acc,exam_id__2011-04": 0.375, "acc,exam_id__2016-20": 0.45, "acc,exam_id__2014-15": 0.3974358974358974, "acc,exam_id__2012-07": 0.4, "acc,exam_id__2016-19": 0.4230769230769231, "acc,exam_id__2015-18": 0.45, "acc,exam_id__2013-12": 0.4875, "acc,exam_id__2010-02": 0.45, "main_score": 0.4118451025056948 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.7948490568935549, "acc,all": 0.7949346405228758, "main_score": 
0.7948490568935549 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.5684271643349206, "mse,all": 1.246441993464052, "main_score": 0.5684271643349206 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.637960088691796, "acc,all": 0.7584615384615384, "main_score": 0.637960088691796 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.775686136523575, "acc,all": 0.7814285714285715, "main_score": 0.775686136523575 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.5712377041472934, "acc,all": 0.5781433607520564, "main_score": 0.5712377041472934 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.5864804330790114, "acc,all": 0.6577114427860696, "main_score": 0.5864804330790114 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 0, "non_truncated": 1429, "padded": 0, "non_padded": 1429, "fewshots_truncated": 0, "mean_seq_length": 1816.6696990902728, "min_seq_length": 1522, "max_seq_length": 2832, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1930.1738525730182, "min_seq_length": 1518, "max_seq_length": 2812, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1566.7266514806379, "min_seq_length": 1266, "max_seq_length": 2104, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1664.5265522875818, "min_seq_length": 1640, "max_seq_length": 1736, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1868.5265522875818, "min_seq_length": 1844, "max_seq_length": 1940, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1923.1292307692308, "min_seq_length": 1863, "max_seq_length": 2056, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1680.9257142857143, "min_seq_length": 1653, "max_seq_length": 1959, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 2208.7544065804937, "min_seq_length": 2171, "max_seq_length": 2244, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1894.844776119403, "min_seq_length": 1872, "max_seq_length": 2010, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 0, "non_truncated": 14150, "padded": 0, "non_padded": 14150, "fewshots_truncated": 0 }
{ "start_date": "2024-02-06T22-20-36.837117", "start_time": 1707258037.4485347, "end_time": 1707261506.7127337, "total_evaluation_time_seconds": 3469.2641990184784, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "59c6151fb7880f89c01a95a76634e17cd09a2e94", "model_dtype": "bfloat16", "model_memory_footprint": 12189188096, "model_num_parameters": 6061035520, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:1", "batch_size": 16, "max_length": 4096, "max_ctx_length": 4064, "max_gen_toks": 32, "model_name": "01-ai/Yi-6B", "job_id": 22, "model_id": "01-ai/Yi-6B_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:04:05", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.47236788898729504, "all_grouped_npm": 0.24469294516758852, "all_grouped": { "enem_challenge": 0.5724282715185445, "bluex": 0.4951321279554937, "oab_exams": 0.44009111617312074, "assin2_rte": 0.48362346630826814, "assin2_sts": 0.44473899241461606, "faquad_nli": 0.6078037007240547, "sparrow_pt": 0.2627575478169676 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.5724282715185445, "harness|bluex|bluex|None|3": 0.4951321279554937, "harness|oab_exams|oab_exams|None|3": 0.44009111617312074, "harness|assin2_rte|assin2_rte|None|15": 0.48362346630826814, "harness|assin2_sts|assin2_sts|None|15": 0.44473899241461606, "harness|faquad_nli|faquad_nli|None|15": 0.6078037007240547, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.03940263092992762, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.4053965389977856, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.3805098664137443, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.2257211549264128 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.5724282715185445, "acc,exam_id__2016_2": 0.5365853658536586, "acc,exam_id__2016": 0.5537190082644629, "acc,exam_id__2010": 0.5641025641025641, "acc,exam_id__2014": 0.5871559633027523, "acc,exam_id__2023": 0.5925925925925926, "acc,exam_id__2017": 0.603448275862069, "acc,exam_id__2015": 0.5882352941176471, "acc,exam_id__2012": 0.5689655172413793, "acc,exam_id__2022": 0.5639097744360902, "acc,exam_id__2013": 0.5925925925925926, "acc,exam_id__2009": 0.5043478260869565, "acc,exam_id__2011": 0.6153846153846154, "main_score": 0.5724282715185445 }, "harness|bluex|bluex|None|3": { "acc,all": 0.4951321279554937, "acc,exam_id__USP_2019": 0.525, "acc,exam_id__UNICAMP_2018": 0.35185185185185186, "acc,exam_id__UNICAMP_2020": 0.4727272727272727, "acc,exam_id__UNICAMP_2023": 0.6046511627906976, "acc,exam_id__UNICAMP_2024": 0.5777777777777777, "acc,exam_id__UNICAMP_2019": 0.42, "acc,exam_id__UNICAMP_2021_2": 0.49019607843137253, "acc,exam_id__USP_2022": 0.5510204081632653, "acc,exam_id__USP_2020": 0.48214285714285715, "acc,exam_id__USP_2018": 0.42592592592592593, "acc,exam_id__USP_2021": 0.36538461538461536, "acc,exam_id__USP_2023": 0.5681818181818182, "acc,exam_id__USP_2024": 0.5609756097560976, "acc,exam_id__UNICAMP_2021_1": 0.5217391304347826, "acc,exam_id__UNICAMP_2022": 0.6153846153846154, "main_score": 0.4951321279554937 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.44009111617312074, "acc,exam_id__2014-13": 0.4125, "acc,exam_id__2012-07": 0.4125, "acc,exam_id__2012-09": 0.36363636363636365, "acc,exam_id__2011-03": 0.35353535353535354, "acc,exam_id__2012-08": 0.5125, "acc,exam_id__2015-17": 0.5256410256410257, "acc,exam_id__2014-15": 0.4358974358974359, "acc,exam_id__2017-24": 0.4375, "acc,exam_id__2015-16": 0.45, "acc,exam_id__2017-23": 0.4875, "acc,exam_id__2011-04": 0.3625, "acc,exam_id__2010-02": 0.47, "acc,exam_id__2016-19": 0.47435897435897434, "acc,exam_id__2012-06": 0.45, "acc,exam_id__2012-06a": 0.45, "acc,exam_id__2013-12": 0.525, "acc,exam_id__2017-22": 0.425, "acc,exam_id__2010-01": 0.2823529411764706, "acc,exam_id__2013-10": 0.425, "acc,exam_id__2014-14": 0.5125, "acc,exam_id__2018-25": 0.55, "acc,exam_id__2016-20a": 0.3625, "acc,exam_id__2015-18": 0.4875, "acc,exam_id__2011-05": 0.45, "acc,exam_id__2013-11": 0.45, "acc,exam_id__2016-20": 0.4375, "acc,exam_id__2016-21": 0.4, "main_score": 0.44009111617312074 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.48362346630826814, 
"acc,all": 0.5755718954248366, "main_score": 0.48362346630826814 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.44473899241461606, "mse,all": 2.0918790849673203, "main_score": 0.44473899241461606 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.6078037007240547, "acc,all": 0.6676923076923077, "main_score": 0.6078037007240547 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.03940263092992762, "acc,all": 0.104, "main_score": 0.03940263092992762 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.4053965389977856, "acc,all": 0.652, "main_score": 0.4053965389977856 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.3805098664137443, "acc,all": 0.73, "main_score": 0.3805098664137443 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.2257211549264128, "acc,all": 0.366, "main_score": 0.2257211549264128 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 0, "non_truncated": 1429, "padded": 0, "non_padded": 1429, "fewshots_truncated": 0, "mean_seq_length": 1749.6696990902728, "min_seq_length": 1455, "max_seq_length": 2765, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1473.1738525730182, "min_seq_length": 1061, "max_seq_length": 2355, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1501.7266514806379, "min_seq_length": 1201, "max_seq_length": 2039, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1376.5265522875818, "min_seq_length": 1352, "max_seq_length": 1448, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1558.5265522875818, "min_seq_length": 1534, "max_seq_length": 1630, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1721.1292307692308, "min_seq_length": 1661, "max_seq_length": 1854, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1852.022, "min_seq_length": 1827, "max_seq_length": 1894, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1902.65, "min_seq_length": 1876, "max_seq_length": 1944, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1546.77, "min_seq_length": 1527, "max_seq_length": 1591, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1714.068, "min_seq_length": 1694, "max_seq_length": 1755, "max_ctx_length": 
4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 0, "non_truncated": 11889, "padded": 0, "non_padded": 11889, "fewshots_truncated": 0 }
{ "start_date": "2024-02-17T03-42-08.504508", "start_time": 1708141329.159125, "end_time": 1708149318.0005805, "total_evaluation_time_seconds": 7988.841455459595, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "c2532d3c48f214e313556f9a48e8ed1083c0b9e9", "model_dtype": "bfloat16", "model_memory_footprint": 12189188096, "model_num_parameters": 6061035520, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:1", "batch_size": 16, "max_length": 4096, "max_ctx_length": 4064, "max_gen_toks": 32, "model_name": "01-ai/Yi-6B", "job_id": 228, "model_id": "01-ai/Yi-6B_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-05T23:04:05", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.5947625326022572, "all_grouped_npm": 0.39162578790069535, "all_grouped": { "enem_challenge": 0.5689293212036389, "bluex": 0.5132127955493742, "oab_exams": 0.4460136674259681, "assin2_rte": 0.7903932929806128, "assin2_sts": 0.5666878345297481, "faquad_nli": 0.5985418799210473, "hatebr_offensive": 0.7425595238095237, "portuguese_hate_speech": 0.6184177704320946, "tweetsentbr": 0.5081067075683067 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.5689293212036389, "harness|bluex|bluex|None|3": 0.5132127955493742, "harness|oab_exams|oab_exams|None|3": 0.4460136674259681, "harness|assin2_rte|assin2_rte|None|15": 0.7903932929806128, "harness|assin2_sts|assin2_sts|None|15": 0.5666878345297481, "harness|faquad_nli|faquad_nli|None|15": 0.5985418799210473, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.7425595238095237, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.6184177704320946, "harness|tweetsentbr|tweetsentbr|None|25": 0.5081067075683067 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.5689293212036389, "acc,exam_id__2023": 0.5925925925925926, "acc,exam_id__2010": 0.5726495726495726, "acc,exam_id__2016_2": 0.5203252032520326, "acc,exam_id__2011": 0.6153846153846154, "acc,exam_id__2022": 0.5263157894736842, "acc,exam_id__2015": 0.5798319327731093, "acc,exam_id__2013": 0.5833333333333334, "acc,exam_id__2014": 0.5779816513761468, "acc,exam_id__2009": 0.5478260869565217, "acc,exam_id__2016": 0.5289256198347108, "acc,exam_id__2012": 0.5775862068965517, "acc,exam_id__2017": 0.6120689655172413, "main_score": 0.5689293212036389 }, "harness|bluex|bluex|None|3": { "acc,all": 0.5132127955493742, "acc,exam_id__UNICAMP_2020": 0.4909090909090909, "acc,exam_id__UNICAMP_2021_2": 0.5098039215686274, "acc,exam_id__USP_2018": 0.42592592592592593, "acc,exam_id__UNICAMP_2023": 0.5581395348837209, "acc,exam_id__USP_2024": 0.6097560975609756, "acc,exam_id__UNICAMP_2021_1": 0.4782608695652174, "acc,exam_id__USP_2020": 0.5178571428571429, "acc,exam_id__USP_2019": 0.425, "acc,exam_id__USP_2023": 0.5909090909090909, "acc,exam_id__UNICAMP_2019": 0.46, "acc,exam_id__USP_2022": 0.5510204081632653, "acc,exam_id__UNICAMP_2024": 0.5777777777777777, "acc,exam_id__USP_2021": 0.4807692307692308, "acc,exam_id__UNICAMP_2022": 0.6153846153846154, "acc,exam_id__UNICAMP_2018": 0.46296296296296297, "main_score": 0.5132127955493742 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.4460136674259681, "acc,exam_id__2016-20": 0.4625, "acc,exam_id__2015-17": 0.5128205128205128, "acc,exam_id__2014-13": 0.4, "acc,exam_id__2018-25": 0.525, "acc,exam_id__2017-22": 0.45, "acc,exam_id__2014-14": 0.5375, "acc,exam_id__2013-11": 0.425, "acc,exam_id__2015-18": 0.5125, "acc,exam_id__2011-05": 0.475, "acc,exam_id__2012-08": 0.475, "acc,exam_id__2012-06a": 0.45, "acc,exam_id__2017-24": 0.4, "acc,exam_id__2012-07": 0.425, "acc,exam_id__2016-20a": 0.3875, "acc,exam_id__2016-21": 0.4, "acc,exam_id__2016-19": 0.44871794871794873, "acc,exam_id__2010-01": 0.2823529411764706, "acc,exam_id__2011-04": 0.375, "acc,exam_id__2012-06": 0.4125, "acc,exam_id__2017-23": 0.5, "acc,exam_id__2012-09": 0.4025974025974026, "acc,exam_id__2013-12": 0.525, "acc,exam_id__2013-10": 0.45, "acc,exam_id__2011-03": 0.37373737373737376, "acc,exam_id__2015-16": 0.5, "acc,exam_id__2010-02": 0.5, "acc,exam_id__2014-15": 0.44871794871794873, "main_score": 0.4460136674259681 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.7903932929806128, "acc,all": 0.7904411764705882, "main_score": 
0.7903932929806128 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.5666878345297481, "mse,all": 1.3787418300653598, "main_score": 0.5666878345297481 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.5985418799210473, "acc,all": 0.78, "main_score": 0.5985418799210473 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.7425595238095237, "acc,all": 0.7528571428571429, "main_score": 0.7425595238095237 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.6184177704320946, "acc,all": 0.6556991774383079, "main_score": 0.6184177704320946 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.5081067075683067, "acc,all": 0.5263681592039801, "main_score": 0.5081067075683067 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 0, "non_truncated": 1429, "padded": 0, "non_padded": 1429, "fewshots_truncated": 0, "mean_seq_length": 1773.6696990902728, "min_seq_length": 1479, "max_seq_length": 2789, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1887.1738525730182, "min_seq_length": 1475, "max_seq_length": 2769, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1523.7266514806379, "min_seq_length": 1223, "max_seq_length": 2061, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1501.5265522875818, "min_seq_length": 1477, "max_seq_length": 1573, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1720.5265522875818, "min_seq_length": 1696, "max_seq_length": 1792, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1760.1292307692308, "min_seq_length": 1700, "max_seq_length": 1893, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1417.9257142857143, "min_seq_length": 1390, "max_seq_length": 1696, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1945.7544065804934, "min_seq_length": 1908, "max_seq_length": 1981, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1614.844776119403, "min_seq_length": 1592, "max_seq_length": 1730, "max_ctx_length": 4064, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 0, "non_truncated": 14150, "padded": 0, "non_padded": 14150, "fewshots_truncated": 0 }
{ "start_date": "2024-04-14T12-49-52.148781", "start_time": 1713098993.2055054, "end_time": 1713147692.7594829, "total_evaluation_time_seconds": 48699.55397748947, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "a293a71f60520015aa4d8482cc3150d756a73247", "model_dtype": "bfloat16", "model_memory_footprint": 24101277696, "model_num_parameters": 8829407232, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 2, "max_length": 2560, "max_ctx_length": 2528, "max_gen_toks": 32, "model_name": "01-ai/Yi-9B-200k", "job_id": 451, "model_id": "01-ai/Yi-9B-200k_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-04-13T05:22:25", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.6942109164969046, "all_grouped_npm": 0.542160434184262, "all_grouped": { "enem_challenge": 0.6564030790762772, "bluex": 0.5354659248956884, "oab_exams": 0.5056947608200456, "assin2_rte": 0.8708321784112503, "assin2_sts": 0.7508245525986388, "faquad_nli": 0.7162112665738773, "hatebr_offensive": 0.8238294119604646, "portuguese_hate_speech": 0.6723821369343758, "tweetsentbr": 0.7162549372015228 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.6564030790762772, "harness|bluex|bluex|None|3": 0.5354659248956884, "harness|oab_exams|oab_exams|None|3": 0.5056947608200456, "harness|assin2_rte|assin2_rte|None|15": 0.8708321784112503, "harness|assin2_sts|assin2_sts|None|15": 0.7508245525986388, "harness|faquad_nli|faquad_nli|None|15": 0.7162112665738773, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.8238294119604646, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.6723821369343758, "harness|tweetsentbr|tweetsentbr|None|25": 0.7162549372015228 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.6564030790762772, "acc,exam_id__2013": 0.6111111111111112, "acc,exam_id__2016": 0.6776859504132231, "acc,exam_id__2010": 0.6837606837606838, "acc,exam_id__2017": 0.6206896551724138, "acc,exam_id__2012": 0.6896551724137931, "acc,exam_id__2014": 0.6605504587155964, "acc,exam_id__2016_2": 0.6910569105691057, "acc,exam_id__2009": 0.6260869565217392, "acc,exam_id__2015": 0.6470588235294118, "acc,exam_id__2011": 0.7350427350427351, "acc,exam_id__2023": 0.6148148148148148, "acc,exam_id__2022": 0.6240601503759399, "main_score": 0.6564030790762772 }, "harness|bluex|bluex|None|3": { "acc,all": 0.5354659248956884, "acc,exam_id__UNICAMP_2024": 0.5111111111111111, "acc,exam_id__UNICAMP_2019": 0.52, "acc,exam_id__UNICAMP_2021_1": 0.5434782608695652, "acc,exam_id__USP_2021": 0.5, "acc,exam_id__UNICAMP_2022": 0.6410256410256411, "acc,exam_id__UNICAMP_2020": 0.509090909090909, "acc,exam_id__UNICAMP_2023": 0.6744186046511628, "acc,exam_id__USP_2019": 0.525, "acc,exam_id__UNICAMP_2018": 0.48148148148148145, "acc,exam_id__USP_2020": 0.5, "acc,exam_id__USP_2023": 0.6363636363636364, "acc,exam_id__USP_2018": 0.37037037037037035, "acc,exam_id__UNICAMP_2021_2": 0.45098039215686275, "acc,exam_id__USP_2024": 0.7317073170731707, "acc,exam_id__USP_2022": 0.5510204081632653, "main_score": 0.5354659248956884 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.5056947608200456, "acc,exam_id__2015-18": 0.525, "acc,exam_id__2012-09": 0.37662337662337664, "acc,exam_id__2016-20a": 0.4, "acc,exam_id__2013-11": 0.525, "acc,exam_id__2014-13": 0.55, "acc,exam_id__2016-20": 0.5375, "acc,exam_id__2014-15": 0.48717948717948717, "acc,exam_id__2014-14": 0.6, "acc,exam_id__2010-01": 0.49411764705882355, "acc,exam_id__2015-16": 0.5875, "acc,exam_id__2017-22": 0.4625, "acc,exam_id__2018-25": 0.5125, "acc,exam_id__2016-19": 0.5384615384615384, "acc,exam_id__2017-24": 0.5375, "acc,exam_id__2012-06": 0.5125, "acc,exam_id__2012-06a": 0.4875, "acc,exam_id__2013-10": 0.5, "acc,exam_id__2017-23": 0.475, "acc,exam_id__2016-21": 0.475, "acc,exam_id__2011-04": 0.525, "acc,exam_id__2012-07": 0.55, "acc,exam_id__2013-12": 0.5375, "acc,exam_id__2012-08": 0.45, "acc,exam_id__2015-17": 0.5769230769230769, "acc,exam_id__2011-03": 0.41414141414141414, "acc,exam_id__2010-02": 0.49, "acc,exam_id__2011-05": 0.55, "main_score": 0.5056947608200456 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.8708321784112503, "acc,all": 0.8709150326797386, "main_score": 0.8708321784112503 }, 
"harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.7508245525986388, "mse,all": 0.5094035947712419, "main_score": 0.7508245525986388 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.7162112665738773, "acc,all": 0.7707692307692308, "main_score": 0.7162112665738773 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.8238294119604646, "acc,all": 0.8271428571428572, "main_score": 0.8238294119604646 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.6723821369343758, "acc,all": 0.6944770857814336, "main_score": 0.6723821369343758 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.7162549372015228, "acc,all": 0.7303482587064677, "main_score": 0.7162549372015228 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 2, "mean_seq_length": 1773.6696990902728, "min_seq_length": 1479, "max_seq_length": 2789, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.998600419874038 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 3, "non_truncated": 716, "padded": 0, "non_padded": 719, "fewshots_truncated": 5, "mean_seq_length": 1887.1738525730182, "min_seq_length": 1475, "max_seq_length": 2769, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9930458970792766 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1523.7266514806379, "min_seq_length": 1223, "max_seq_length": 2061, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1501.5265522875818, "min_seq_length": 1477, "max_seq_length": 1573, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1720.5265522875818, "min_seq_length": 1696, "max_seq_length": 1792, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1760.1292307692308, "min_seq_length": 1700, "max_seq_length": 1893, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1417.9257142857143, "min_seq_length": 1390, "max_seq_length": 1696, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1945.7544065804934, "min_seq_length": 1908, "max_seq_length": 1981, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1614.844776119403, "min_seq_length": 1592, "max_seq_length": 1730, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 5, "non_truncated": 14145, "padded": 0, "non_padded": 14150, "fewshots_truncated": 7 }
{ "start_date": "2024-04-14T11-08-02.891090", "start_time": 1713092888.0559978, "end_time": 1713142846.3434105, "total_evaluation_time_seconds": 49958.28741264343, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "dd2c85d649d5e862c747298232cc88acd5467351", "model_dtype": "bfloat16", "model_memory_footprint": 17759490048, "model_num_parameters": 8829407232, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:2", "batch_size": 4, "max_length": 2560, "max_ctx_length": 2528, "max_gen_toks": 32, "model_name": "01-ai/Yi-9B", "job_id": 453, "model_id": "01-ai/Yi-9B_eval_request_False_bfloat16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-04-13T05:20:56", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.6931435749553846, "all_grouped_npm": 0.5423674378252739, "all_grouped": { "enem_challenge": 0.6759972008397481, "bluex": 0.5493741307371349, "oab_exams": 0.4783599088838269, "assin2_rte": 0.8784695970900473, "assin2_sts": 0.752860308487488, "faquad_nli": 0.7478708154144531, "hatebr_offensive": 0.8574531631821884, "portuguese_hate_speech": 0.6448598532923182, "tweetsentbr": 0.6530471966712571 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.6759972008397481, "harness|bluex|bluex|None|3": 0.5493741307371349, "harness|oab_exams|oab_exams|None|3": 0.4783599088838269, "harness|assin2_rte|assin2_rte|None|15": 0.8784695970900473, "harness|assin2_sts|assin2_sts|None|15": 0.752860308487488, "harness|faquad_nli|faquad_nli|None|15": 0.7478708154144531, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.8574531631821884, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.6448598532923182, "harness|tweetsentbr|tweetsentbr|None|25": 0.6530471966712571 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.6759972008397481, "acc,exam_id__2011": 0.7692307692307693, "acc,exam_id__2015": 0.6302521008403361, "acc,exam_id__2017": 0.7241379310344828, "acc,exam_id__2023": 0.6666666666666666, "acc,exam_id__2016_2": 0.6747967479674797, "acc,exam_id__2022": 0.6090225563909775, "acc,exam_id__2009": 0.6521739130434783, "acc,exam_id__2010": 0.6837606837606838, "acc,exam_id__2016": 0.6446280991735537, "acc,exam_id__2014": 0.7064220183486238, "acc,exam_id__2012": 0.7068965517241379, "acc,exam_id__2013": 0.6574074074074074, "main_score": 0.6759972008397481 }, "harness|bluex|bluex|None|3": { "acc,all": 0.5493741307371349, "acc,exam_id__USP_2018": 0.37037037037037035, "acc,exam_id__USP_2019": 0.425, "acc,exam_id__USP_2023": 0.6363636363636364, "acc,exam_id__USP_2022": 0.5306122448979592, "acc,exam_id__UNICAMP_2023": 0.6046511627906976, "acc,exam_id__UNICAMP_2018": 0.48148148148148145, "acc,exam_id__UNICAMP_2019": 0.6, "acc,exam_id__UNICAMP_2024": 0.4666666666666667, "acc,exam_id__UNICAMP_2021_1": 0.6304347826086957, "acc,exam_id__UNICAMP_2021_2": 0.5098039215686274, "acc,exam_id__USP_2020": 0.6071428571428571, "acc,exam_id__UNICAMP_2022": 0.6410256410256411, "acc,exam_id__UNICAMP_2020": 0.6363636363636364, "acc,exam_id__USP_2024": 0.7317073170731707, "acc,exam_id__USP_2021": 0.4230769230769231, "main_score": 0.5493741307371349 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.4783599088838269, "acc,exam_id__2013-11": 0.575, "acc,exam_id__2016-19": 0.46153846153846156, "acc,exam_id__2016-20a": 0.3625, "acc,exam_id__2011-03": 0.3838383838383838, "acc,exam_id__2016-20": 0.525, "acc,exam_id__2018-25": 0.5875, "acc,exam_id__2015-17": 0.5384615384615384, "acc,exam_id__2017-24": 0.4375, "acc,exam_id__2012-06a": 0.5375, "acc,exam_id__2013-10": 0.4625, "acc,exam_id__2013-12": 0.525, "acc,exam_id__2012-06": 0.4125, "acc,exam_id__2014-15": 0.47435897435897434, "acc,exam_id__2017-22": 0.45, "acc,exam_id__2011-04": 0.475, "acc,exam_id__2016-21": 0.425, "acc,exam_id__2012-07": 0.475, "acc,exam_id__2012-08": 0.6, "acc,exam_id__2015-18": 0.4625, "acc,exam_id__2015-16": 0.55, "acc,exam_id__2012-09": 0.4155844155844156, "acc,exam_id__2010-01": 0.38823529411764707, "acc,exam_id__2011-05": 0.5125, "acc,exam_id__2010-02": 0.44, "acc,exam_id__2014-14": 0.575, "acc,exam_id__2017-23": 0.4625, "acc,exam_id__2014-13": 0.4375, "main_score": 0.4783599088838269 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.8784695970900473, "acc,all": 0.8786764705882353, 
"main_score": 0.8784695970900473 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.752860308487488, "mse,all": 0.6242361111111111, "main_score": 0.752860308487488 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.7478708154144531, "acc,all": 0.823076923076923, "main_score": 0.7478708154144531 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.8574531631821884, "acc,all": 0.8585714285714285, "main_score": 0.8574531631821884 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.6448598532923182, "acc,all": 0.6580493537015276, "main_score": 0.6448598532923182 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.6530471966712571, "acc,all": 0.7064676616915423, "main_score": 0.6530471966712571 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 2, "mean_seq_length": 1773.6696990902728, "min_seq_length": 1479, "max_seq_length": 2789, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.998600419874038 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 3, "non_truncated": 716, "padded": 0, "non_padded": 719, "fewshots_truncated": 5, "mean_seq_length": 1887.1738525730182, "min_seq_length": 1475, "max_seq_length": 2769, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9930458970792766 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1523.7266514806379, "min_seq_length": 1223, "max_seq_length": 2061, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1501.5265522875818, "min_seq_length": 1477, "max_seq_length": 1573, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1720.5265522875818, "min_seq_length": 1696, "max_seq_length": 1792, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1760.1292307692308, "min_seq_length": 1700, "max_seq_length": 1893, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1417.9257142857143, "min_seq_length": 1390, "max_seq_length": 1696, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1945.7544065804934, "min_seq_length": 1908, "max_seq_length": 1981, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1614.844776119403, "min_seq_length": 1592, "max_seq_length": 1730, "max_ctx_length": 2528, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 5, "non_truncated": 14145, "padded": 0, "non_padded": 14150, "fewshots_truncated": 7 }
{ "start_date": "2024-02-16T13-19-48.657595", "start_time": 1708089589.3590574, "end_time": 1708096329.521117, "total_evaluation_time_seconds": 6740.162059545517, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "1266df41f51fed9c1914f164152072acc5f89d6d", "model_dtype": "float16", "model_memory_footprint": 13838073856, "model_num_parameters": 6902255616, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 16, "max_length": 2048, "max_ctx_length": 2016, "max_gen_toks": 32, "model_name": "22h/cabrita_7b_pt_850000", "job_id": 213, "model_id": "22h/cabrita_7b_pt_850000_eval_request_False_float16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "πŸ†Ž : language adapted models (FP, FT, ...)", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-11T13:34:40", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.27092479439362194, "all_grouped_npm": -0.00787880781904908, "all_grouped": { "enem_challenge": 0.2218334499650105, "bluex": 0.24895688456189152, "oab_exams": 0.2783599088838269, "assin2_rte": 0.6996743545023234, "assin2_sts": 0.007646758869425693, "faquad_nli": 0.17721518987341772, "sparrow_pt": 0.26278701409945776 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.2218334499650105, "harness|bluex|bluex|None|3": 0.24895688456189152, "harness|oab_exams|oab_exams|None|3": 0.2783599088838269, "harness|assin2_rte|assin2_rte|None|15": 0.6996743545023234, "harness|assin2_sts|assin2_sts|None|15": 0.007646758869425693, "harness|faquad_nli|faquad_nli|None|15": 0.17721518987341772, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.03893621323699084, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.3932038834951456, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.33536123319405053, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.283646726471644 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.2218334499650105, "acc,exam_id__2015": 0.18487394957983194, "acc,exam_id__2012": 0.25862068965517243, "acc,exam_id__2017": 0.21551724137931033, "acc,exam_id__2009": 0.23478260869565218, "acc,exam_id__2023": 0.22962962962962963, "acc,exam_id__2016_2": 0.23577235772357724, "acc,exam_id__2010": 0.18803418803418803, "acc,exam_id__2014": 0.25688073394495414, "acc,exam_id__2013": 0.2037037037037037, "acc,exam_id__2022": 0.21052631578947367, "acc,exam_id__2011": 0.23931623931623933, "acc,exam_id__2016": 0.2066115702479339, "main_score": 0.2218334499650105 }, "harness|bluex|bluex|None|3": { "acc,all": 0.24895688456189152, "acc,exam_id__UNICAMP_2022": 0.28205128205128205, "acc,exam_id__UNICAMP_2019": 0.32, "acc,exam_id__UNICAMP_2023": 0.20930232558139536, "acc,exam_id__UNICAMP_2018": 0.3148148148148148, "acc,exam_id__UNICAMP_2020": 0.2727272727272727, "acc,exam_id__USP_2024": 0.17073170731707318, "acc,exam_id__UNICAMP_2021_2": 0.27450980392156865, "acc,exam_id__USP_2022": 0.20408163265306123, "acc,exam_id__USP_2019": 0.125, "acc,exam_id__USP_2021": 0.19230769230769232, "acc,exam_id__USP_2023": 0.25, "acc,exam_id__UNICAMP_2021_1": 0.2608695652173913, "acc,exam_id__UNICAMP_2024": 0.3111111111111111, "acc,exam_id__USP_2020": 0.30357142857142855, "acc,exam_id__USP_2018": 0.2037037037037037, "main_score": 0.24895688456189152 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.2783599088838269, "acc,exam_id__2012-06a": 0.275, "acc,exam_id__2015-17": 0.24358974358974358, "acc,exam_id__2012-06": 0.3, "acc,exam_id__2014-15": 0.3333333333333333, "acc,exam_id__2016-20": 0.275, "acc,exam_id__2013-10": 0.2375, "acc,exam_id__2012-07": 0.2875, "acc,exam_id__2011-03": 0.2828282828282828, "acc,exam_id__2011-05": 0.3, "acc,exam_id__2016-19": 0.28205128205128205, "acc,exam_id__2017-23": 0.2875, "acc,exam_id__2017-22": 0.2625, "acc,exam_id__2018-25": 0.35, "acc,exam_id__2014-13": 0.3375, "acc,exam_id__2017-24": 0.3125, "acc,exam_id__2010-01": 0.25882352941176473, "acc,exam_id__2014-14": 0.275, "acc,exam_id__2011-04": 0.3375, "acc,exam_id__2013-12": 0.2375, "acc,exam_id__2015-16": 0.225, "acc,exam_id__2016-21": 0.3, "acc,exam_id__2013-11": 0.275, "acc,exam_id__2012-08": 0.225, "acc,exam_id__2016-20a": 0.25, "acc,exam_id__2012-09": 0.2727272727272727, "acc,exam_id__2010-02": 0.24, "acc,exam_id__2015-18": 0.2625, "main_score": 0.2783599088838269 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.6996743545023234, 
"acc,all": 0.7205882352941176, "main_score": 0.6996743545023234 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.007646758869425693, "mse,all": 2.2904656862745103, "main_score": 0.007646758869425693 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.17721518987341772, "acc,all": 0.2153846153846154, "main_score": 0.17721518987341772 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.03893621323699084, "acc,all": 0.112, "main_score": 0.03893621323699084 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.3932038834951456, "acc,all": 0.648, "main_score": 0.3932038834951456 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.33536123319405053, "acc,all": 0.332, "main_score": 0.33536123319405053 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.283646726471644, "acc,all": 0.316, "main_score": 0.283646726471644 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 1, "non_truncated": 1428, "padded": 0, "non_padded": 1429, "fewshots_truncated": 1, "mean_seq_length": 1167.6193142057382, "min_seq_length": 982, "max_seq_length": 2222, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9993002099370187 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1135.029207232267, "min_seq_length": 843, "max_seq_length": 1744, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 966.5047835990888, "min_seq_length": 777, "max_seq_length": 1319, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 997.8459967320262, "min_seq_length": 981, "max_seq_length": 1049, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1034.8459967320262, "min_seq_length": 1018, "max_seq_length": 1086, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1133.2415384615385, "min_seq_length": 1096, "max_seq_length": 1210, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1425.31, "min_seq_length": 1408, "max_seq_length": 1470, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1359.854, "min_seq_length": 1340, "max_seq_length": 1421, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1234.784, "min_seq_length": 1221, "max_seq_length": 1262, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1351.984, "min_seq_length": 1338, "max_seq_length": 1381, 
"max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 1, "non_truncated": 11888, "padded": 0, "non_padded": 11889, "fewshots_truncated": 1 }
{ "start_date": "2024-03-08T02-07-35.059732", "start_time": 1709863656.8278544, "end_time": 1709877780.2710946, "total_evaluation_time_seconds": 14123.44324016571, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "1266df41f51fed9c1914f164152072acc5f89d6d", "model_dtype": "float16", "model_memory_footprint": 13842268160, "model_num_parameters": 6902255616, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:1", "batch_size": 8, "max_length": 2048, "max_ctx_length": 2016, "max_gen_toks": 32, "model_name": "22h/cabrita_7b_pt_850000", "job_id": 305, "model_id": "22h/cabrita_7b_pt_850000_eval_request_False_float16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "πŸ†Ž : language adapted models (FP, FT, ...)", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-11T13:34:40", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.32141956108734737, "all_grouped_npm": -0.03225449824402505, "all_grouped": { "enem_challenge": 0.22533240027991602, "bluex": 0.23087621696801114, "oab_exams": 0.2920273348519362, "assin2_rte": 0.3333333333333333, "assin2_sts": 0.1265472264440735, "faquad_nli": 0.17721518987341772, "hatebr_offensive": 0.5597546967409981, "portuguese_hate_speech": 0.490163110698825, "tweetsentbr": 0.4575265405956153 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.22533240027991602, "harness|bluex|bluex|None|3": 0.23087621696801114, "harness|oab_exams|oab_exams|None|3": 0.2920273348519362, "harness|assin2_rte|assin2_rte|None|15": 0.3333333333333333, "harness|assin2_sts|assin2_sts|None|15": 0.1265472264440735, "harness|faquad_nli|faquad_nli|None|15": 0.17721518987341772, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.5597546967409981, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.490163110698825, "harness|tweetsentbr|tweetsentbr|None|25": 0.4575265405956153 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.22533240027991602, "acc,exam_id__2016_2": 0.23577235772357724, "acc,exam_id__2016": 0.2231404958677686, "acc,exam_id__2015": 0.16806722689075632, "acc,exam_id__2013": 0.2037037037037037, "acc,exam_id__2023": 0.25925925925925924, "acc,exam_id__2009": 0.23478260869565218, "acc,exam_id__2012": 0.25, "acc,exam_id__2022": 0.24060150375939848, "acc,exam_id__2010": 0.17094017094017094, "acc,exam_id__2017": 0.22413793103448276, "acc,exam_id__2014": 0.23853211009174313, "acc,exam_id__2011": 0.24786324786324787, "main_score": 0.22533240027991602 }, "harness|bluex|bluex|None|3": { "acc,all": 0.23087621696801114, "acc,exam_id__UNICAMP_2022": 0.2564102564102564, "acc,exam_id__UNICAMP_2023": 0.3023255813953488, "acc,exam_id__USP_2021": 0.21153846153846154, "acc,exam_id__UNICAMP_2024": 0.26666666666666666, "acc,exam_id__USP_2024": 0.2682926829268293, "acc,exam_id__UNICAMP_2021_2": 0.27450980392156865, "acc,exam_id__UNICAMP_2021_1": 0.30434782608695654, "acc,exam_id__UNICAMP_2019": 0.3, "acc,exam_id__UNICAMP_2020": 0.16363636363636364, "acc,exam_id__USP_2019": 0.15, "acc,exam_id__USP_2018": 0.09259259259259259, "acc,exam_id__USP_2020": 0.26785714285714285, "acc,exam_id__UNICAMP_2018": 0.2777777777777778, "acc,exam_id__USP_2022": 0.1836734693877551, "acc,exam_id__USP_2023": 0.1590909090909091, "main_score": 0.23087621696801114 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.2920273348519362, "acc,exam_id__2014-15": 0.358974358974359, "acc,exam_id__2015-18": 0.275, "acc,exam_id__2016-20": 0.3375, "acc,exam_id__2011-05": 0.3, "acc,exam_id__2013-12": 0.3, "acc,exam_id__2013-11": 0.325, "acc,exam_id__2017-23": 0.25, "acc,exam_id__2017-22": 0.3125, "acc,exam_id__2015-16": 0.2625, "acc,exam_id__2017-24": 0.3, "acc,exam_id__2011-04": 0.2875, "acc,exam_id__2016-21": 0.35, "acc,exam_id__2011-03": 0.29292929292929293, "acc,exam_id__2014-14": 0.2875, "acc,exam_id__2015-17": 0.2564102564102564, "acc,exam_id__2012-08": 0.25, "acc,exam_id__2010-02": 0.27, "acc,exam_id__2012-06": 0.2875, "acc,exam_id__2010-01": 0.2823529411764706, "acc,exam_id__2016-19": 0.32051282051282054, "acc,exam_id__2014-13": 0.3125, "acc,exam_id__2012-09": 0.2857142857142857, "acc,exam_id__2012-06a": 0.2625, "acc,exam_id__2012-07": 0.2375, "acc,exam_id__2013-10": 0.275, "acc,exam_id__2016-20a": 0.2375, "acc,exam_id__2018-25": 0.375, "main_score": 0.2920273348519362 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.3333333333333333, "acc,all": 0.5, 
"main_score": 0.3333333333333333 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.1265472264440735, "mse,all": 2.4658741830065356, "main_score": 0.1265472264440735 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.17721518987341772, "acc,all": 0.2153846153846154, "main_score": 0.17721518987341772 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.5597546967409981, "acc,all": 0.6214285714285714, "main_score": 0.5597546967409981 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.490163110698825, "acc,all": 0.7085781433607521, "main_score": 0.490163110698825 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.4575265405956153, "acc,all": 0.56318407960199, "main_score": 0.4575265405956153 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 2, "mean_seq_length": 1176.6193142057382, "min_seq_length": 991, "max_seq_length": 2231, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.998600419874038 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1339.029207232267, "min_seq_length": 1047, "max_seq_length": 1948, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 974.5047835990888, "min_seq_length": 785, "max_seq_length": 1327, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1082.8459967320262, "min_seq_length": 1066, "max_seq_length": 1134, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1107.8459967320262, "min_seq_length": 1091, "max_seq_length": 1159, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1155.2415384615385, "min_seq_length": 1118, "max_seq_length": 1232, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1091.5071428571428, "min_seq_length": 1071, "max_seq_length": 1301, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1415.5381903642774, "min_seq_length": 1386, "max_seq_length": 1462, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1333.926368159204, "min_seq_length": 1315, "max_seq_length": 1451, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 2, "non_truncated": 14148, "padded": 0, "non_padded": 14150, "fewshots_truncated": 2 }
{ "start_date": "2024-02-16T12-41-51.764611", "start_time": 1708087312.3478136, "end_time": 1708089550.4972095, "total_evaluation_time_seconds": 2238.149395942688, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "fc2a2de94a3b31de54aaace695537c4d1c3e456d", "model_dtype": "float16", "model_memory_footprint": 7130251600, "model_num_parameters": 3554473600, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 32, "max_length": 2048, "max_ctx_length": 2016, "max_gen_toks": 32, "model_name": "22h/open-cabrita3b", "job_id": 212, "model_id": "22h/open-cabrita3b_eval_request_False_float16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "πŸ†Ž : language adapted models (FP, FT, ...)", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-11T13:34:36", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.24167996679748435, "all_grouped_npm": -0.05887820900308151, "all_grouped": { "enem_challenge": 0.18124562631210636, "bluex": 0.18497913769123783, "oab_exams": 0.23006833712984054, "assin2_rte": 0.38392470950636587, "assin2_sts": 0.037061970342522815, "faquad_nli": 0.4396551724137931, "sparrow_pt": 0.23482481418652404 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.18124562631210636, "harness|bluex|bluex|None|3": 0.18497913769123783, "harness|oab_exams|oab_exams|None|3": 0.23006833712984054, "harness|assin2_rte|assin2_rte|None|15": 0.38392470950636587, "harness|assin2_sts|assin2_sts|None|15": 0.037061970342522815, "harness|faquad_nli|faquad_nli|None|15": 0.4396551724137931, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.021198458199745147, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.3932038834951456, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.32555068399362747, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.19934623105757787 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.18124562631210636, "acc,exam_id__2015": 0.15126050420168066, "acc,exam_id__2012": 0.1810344827586207, "acc,exam_id__2017": 0.19827586206896552, "acc,exam_id__2009": 0.1391304347826087, "acc,exam_id__2023": 0.23703703703703705, "acc,exam_id__2016_2": 0.2032520325203252, "acc,exam_id__2010": 0.1623931623931624, "acc,exam_id__2014": 0.1834862385321101, "acc,exam_id__2013": 0.1388888888888889, "acc,exam_id__2022": 0.20300751879699247, "acc,exam_id__2011": 0.20512820512820512, "acc,exam_id__2016": 0.15702479338842976, "main_score": 0.18124562631210636 }, "harness|bluex|bluex|None|3": { "acc,all": 0.18497913769123783, "acc,exam_id__UNICAMP_2022": 0.1794871794871795, "acc,exam_id__UNICAMP_2019": 0.14, "acc,exam_id__UNICAMP_2023": 0.32558139534883723, "acc,exam_id__UNICAMP_2018": 0.16666666666666666, "acc,exam_id__UNICAMP_2020": 0.2, "acc,exam_id__USP_2024": 0.17073170731707318, "acc,exam_id__UNICAMP_2021_2": 0.1568627450980392, "acc,exam_id__USP_2022": 0.16326530612244897, "acc,exam_id__USP_2019": 0.275, "acc,exam_id__USP_2021": 0.19230769230769232, "acc,exam_id__USP_2023": 0.09090909090909091, "acc,exam_id__UNICAMP_2021_1": 0.30434782608695654, "acc,exam_id__UNICAMP_2024": 0.15555555555555556, "acc,exam_id__USP_2020": 0.16071428571428573, "acc,exam_id__USP_2018": 0.12962962962962962, "main_score": 0.18497913769123783 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.23006833712984054, "acc,exam_id__2012-06a": 0.2625, "acc,exam_id__2015-17": 0.23076923076923078, "acc,exam_id__2012-06": 0.225, "acc,exam_id__2014-15": 0.21794871794871795, "acc,exam_id__2016-20": 0.2125, "acc,exam_id__2013-10": 0.225, "acc,exam_id__2012-07": 0.1375, "acc,exam_id__2011-03": 0.24242424242424243, "acc,exam_id__2011-05": 0.2, "acc,exam_id__2016-19": 0.19230769230769232, "acc,exam_id__2017-23": 0.225, "acc,exam_id__2017-22": 0.275, "acc,exam_id__2018-25": 0.3, "acc,exam_id__2014-13": 0.2875, "acc,exam_id__2017-24": 0.225, "acc,exam_id__2010-01": 0.25882352941176473, "acc,exam_id__2014-14": 0.2375, "acc,exam_id__2011-04": 0.225, "acc,exam_id__2013-12": 0.1875, "acc,exam_id__2015-16": 0.25, "acc,exam_id__2016-21": 0.2125, "acc,exam_id__2013-11": 0.175, "acc,exam_id__2012-08": 0.2, "acc,exam_id__2016-20a": 0.275, "acc,exam_id__2012-09": 0.23376623376623376, "acc,exam_id__2010-02": 0.24, "acc,exam_id__2015-18": 0.25, "main_score": 0.23006833712984054 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 
0.38392470950636587, "acc,all": 0.5236928104575164, "main_score": 0.38392470950636587 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.037061970342522815, "mse,all": 2.548566176470588, "main_score": 0.037061970342522815 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.4396551724137931, "acc,all": 0.7846153846153846, "main_score": 0.4396551724137931 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.021198458199745147, "acc,all": 0.086, "main_score": 0.021198458199745147 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.3932038834951456, "acc,all": 0.648, "main_score": 0.3932038834951456 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.32555068399362747, "acc,all": 0.524, "main_score": 0.32555068399362747 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.19934623105757787, "acc,all": 0.344, "main_score": 0.19934623105757787 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 3, "mean_seq_length": 1232.1665500349895, "min_seq_length": 1029, "max_seq_length": 2361, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9979006298110566 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 0, "non_truncated": 719, "padded": 0, "non_padded": 719, "fewshots_truncated": 0, "mean_seq_length": 1171.4019471488177, "min_seq_length": 866, "max_seq_length": 1809, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1029.7216400911161, "min_seq_length": 835, "max_seq_length": 1355, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1026.5416666666667, "min_seq_length": 1009, "max_seq_length": 1077, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1066.5416666666667, "min_seq_length": 1049, "max_seq_length": 1117, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1216.533846153846, "min_seq_length": 1179, "max_seq_length": 1313, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1477.946, "min_seq_length": 1460, "max_seq_length": 1525, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1389.29, "min_seq_length": 1369, "max_seq_length": 1437, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1305.254, "min_seq_length": 1291, "max_seq_length": 1336, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1438.362, "min_seq_length": 1424, "max_seq_length": 1474, 
"max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 2, "non_truncated": 11887, "padded": 0, "non_padded": 11889, "fewshots_truncated": 3 }
{ "start_date": "2024-02-28T16-38-27.766897", "start_time": 1709138308.540028, "end_time": 1709143203.9630034, "total_evaluation_time_seconds": 4895.422975301743, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "fc2a2de94a3b31de54aaace695537c4d1c3e456d", "model_dtype": "float16", "model_memory_footprint": 7142506832, "model_num_parameters": 3554473600, "model_is_loaded_in_4bit": null, "model_is_loaded_in_8bit": null, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 16, "max_length": 2048, "max_ctx_length": 2016, "max_gen_toks": 32, "model_name": "22h/open-cabrita3b", "job_id": 285, "model_id": "22h/open-cabrita3b_eval_request_False_float16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "πŸ†Ž : language adapted models (FP, FT, ...)", "model_architectures": "LlamaForCausalLM", "submitted_time": "2024-02-11T13:34:36", "lm_eval_model_type": "huggingface", "eval_version": "1.1.0" }
{ "all_grouped_average": 0.3303614816761663, "all_grouped_npm": -0.005341553963556416, "all_grouped": { "enem_challenge": 0.17984604618614417, "bluex": 0.2114047287899861, "oab_exams": 0.22687927107061504, "assin2_rte": 0.4301327637723658, "assin2_sts": 0.08919111846797594, "faquad_nli": 0.4396551724137931, "hatebr_offensive": 0.5046251022011318, "portuguese_hate_speech": 0.4118866620594333, "tweetsentbr": 0.47963247012405114 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.17984604618614417, "harness|bluex|bluex|None|3": 0.2114047287899861, "harness|oab_exams|oab_exams|None|3": 0.22687927107061504, "harness|assin2_rte|assin2_rte|None|15": 0.4301327637723658, "harness|assin2_sts|assin2_sts|None|15": 0.08919111846797594, "harness|faquad_nli|faquad_nli|None|15": 0.4396551724137931, "harness|hatebr_offensive|hatebr_offensive|None|25": 0.5046251022011318, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.4118866620594333, "harness|tweetsentbr|tweetsentbr|None|25": 0.47963247012405114 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.17984604618614417, "acc,exam_id__2016_2": 0.2032520325203252, "acc,exam_id__2023": 0.22962962962962963, "acc,exam_id__2014": 0.1834862385321101, "acc,exam_id__2017": 0.1810344827586207, "acc,exam_id__2009": 0.1391304347826087, "acc,exam_id__2015": 0.14285714285714285, "acc,exam_id__2016": 0.18181818181818182, "acc,exam_id__2022": 0.18796992481203006, "acc,exam_id__2012": 0.1896551724137931, "acc,exam_id__2013": 0.1388888888888889, "acc,exam_id__2011": 0.20512820512820512, "acc,exam_id__2010": 0.1623931623931624, "main_score": 0.17984604618614417 }, "harness|bluex|bluex|None|3": { "acc,all": 0.2114047287899861, "acc,exam_id__USP_2023": 0.1590909090909091, "acc,exam_id__UNICAMP_2023": 0.2558139534883721, "acc,exam_id__UNICAMP_2024": 0.26666666666666666, "acc,exam_id__USP_2021": 0.1346153846153846, "acc,exam_id__UNICAMP_2021_2": 0.23529411764705882, "acc,exam_id__UNICAMP_2019": 0.2, "acc,exam_id__UNICAMP_2022": 0.2564102564102564, "acc,exam_id__UNICAMP_2018": 0.2777777777777778, "acc,exam_id__UNICAMP_2020": 0.2727272727272727, "acc,exam_id__USP_2020": 0.14285714285714285, "acc,exam_id__USP_2018": 0.09259259259259259, "acc,exam_id__USP_2019": 0.25, "acc,exam_id__UNICAMP_2021_1": 0.32608695652173914, "acc,exam_id__USP_2024": 0.17073170731707318, "acc,exam_id__USP_2022": 0.16326530612244897, "main_score": 0.2114047287899861 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.22687927107061504, "acc,exam_id__2012-08": 0.1875, "acc,exam_id__2015-17": 0.2564102564102564, "acc,exam_id__2012-09": 0.22077922077922077, "acc,exam_id__2013-11": 0.1875, "acc,exam_id__2014-13": 0.275, "acc,exam_id__2012-06": 0.225, "acc,exam_id__2017-24": 0.2125, "acc,exam_id__2010-01": 0.2823529411764706, "acc,exam_id__2016-20a": 0.25, "acc,exam_id__2012-06a": 0.225, "acc,exam_id__2017-23": 0.225, "acc,exam_id__2014-14": 0.25, "acc,exam_id__2018-25": 0.3, "acc,exam_id__2013-10": 0.2375, "acc,exam_id__2011-05": 0.2375, "acc,exam_id__2017-22": 0.25, "acc,exam_id__2011-03": 0.23232323232323232, "acc,exam_id__2016-21": 0.2125, "acc,exam_id__2015-16": 0.25, "acc,exam_id__2011-04": 0.225, "acc,exam_id__2016-20": 0.2, "acc,exam_id__2014-15": 0.21794871794871795, "acc,exam_id__2012-07": 0.15, "acc,exam_id__2016-19": 0.16666666666666666, "acc,exam_id__2015-18": 0.2375, "acc,exam_id__2013-12": 0.175, "acc,exam_id__2010-02": 0.23, "main_score": 0.22687927107061504 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 0.4301327637723658, "acc,all": 
0.5379901960784313, "main_score": 0.4301327637723658 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.08919111846797594, "mse,all": 2.491196895424836, "main_score": 0.08919111846797594 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.4396551724137931, "acc,all": 0.7846153846153846, "main_score": 0.4396551724137931 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "f1_macro,all": 0.5046251022011318, "acc,all": 0.5585714285714286, "main_score": 0.5046251022011318 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "f1_macro,all": 0.4118866620594333, "acc,all": 0.700352526439483, "main_score": 0.4118866620594333 }, "harness|tweetsentbr|tweetsentbr|None|25": { "f1_macro,all": 0.47963247012405114, "acc,all": 0.5900497512437811, "main_score": 0.47963247012405114 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|hatebr_offensive|hatebr_offensive": "LM Harness task", "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task", "harness|tweetsentbr|tweetsentbr": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1.1, "harness|bluex|bluex": 1.1, "harness|oab_exams|oab_exams": 1.5, "harness|assin2_rte|assin2_rte": 1.1, "harness|assin2_sts|assin2_sts": 1.1, "harness|faquad_nli|faquad_nli": 1.1, "harness|hatebr_offensive|hatebr_offensive": 1, "harness|portuguese_hate_speech|portuguese_hate_speech": 1, "harness|tweetsentbr|tweetsentbr": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 2, "non_truncated": 1427, "padded": 0, "non_padded": 1429, "fewshots_truncated": 3, "mean_seq_length": 1243.1665500349895, "min_seq_length": 1040, "max_seq_length": 2372, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9979006298110566 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 2, "non_truncated": 717, "padded": 0, "non_padded": 719, "fewshots_truncated": 2, "mean_seq_length": 1400.4019471488177, "min_seq_length": 1095, "max_seq_length": 2038, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9972183588317107 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 0, "non_truncated": 2195, "padded": 0, "non_padded": 2195, "fewshots_truncated": 0, "mean_seq_length": 1039.7216400911161, "min_seq_length": 845, "max_seq_length": 1365, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 3 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1103.5416666666667, "min_seq_length": 1086, "max_seq_length": 1154, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1149.5416666666667, "min_seq_length": 1132, "max_seq_length": 1200, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1238.533846153846, "min_seq_length": 1201, "max_seq_length": 1335, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|hatebr_offensive|hatebr_offensive|None|25": { "sample_size": 1400, "truncated": 0, "non_truncated": 1400, "padded": 0, "non_padded": 1400, "fewshots_truncated": 0, "mean_seq_length": 1132.2078571428572, "min_seq_length": 1111, "max_seq_length": 1354, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": { "sample_size": 851, "truncated": 0, "non_truncated": 851, "padded": 0, "non_padded": 851, "fewshots_truncated": 0, "mean_seq_length": 1463.2338425381904, "min_seq_length": 1433, "max_seq_length": 1510, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|tweetsentbr|tweetsentbr|None|25": { "sample_size": 2010, "truncated": 0, "non_truncated": 2010, "padded": 0, "non_padded": 2010, "fewshots_truncated": 0, "mean_seq_length": 1411.4726368159204, "min_seq_length": 1392, "max_seq_length": 1529, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 4, "non_truncated": 14146, "padded": 0, "non_padded": 14150, "fewshots_truncated": 5 }
{ "start_date": "2024-02-08T16-32-05.080295", "start_time": 1707409932.3430328, "end_time": 1707418062.4384792, "total_evaluation_time_seconds": 8130.09544634819, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "b88054ca1b30d740149763d24cd6977ca3138702", "model_dtype": "float16", "model_memory_footprint": 40865865816, "model_num_parameters": 20340658176, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:0", "batch_size": 8, "max_length": 2048, "max_ctx_length": 2016, "max_gen_toks": 32, "model_name": "AI-Sweden-Models/gpt-sw3-20b", "job_id": 102, "model_id": "AI-Sweden-Models/gpt-sw3-20b_eval_request_False_float16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "GPT2LMHeadModel", "submitted_time": "2024-02-05T23:15:38", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.2671930701793801, "all_grouped_npm": -0.013569520485616151, "all_grouped": { "enem_challenge": 0.19454163750874737, "bluex": 0.21835883171070933, "oab_exams": 0.24009111617312073, "assin2_rte": 0.5097556100727492, "assin2_sts": 0.04176979032426292, "faquad_nli": 0.4396551724137931, "sparrow_pt": 0.2261793330522781 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.19454163750874737, "harness|bluex|bluex|None|3": 0.21835883171070933, "harness|oab_exams|oab_exams|None|3": 0.24009111617312073, "harness|assin2_rte|assin2_rte|None|15": 0.5097556100727492, "harness|assin2_sts|assin2_sts|None|15": 0.04176979032426292, "harness|faquad_nli|faquad_nli|None|15": 0.4396551724137931, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.03209457599403715, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.3932038834951456, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.3038200710445833, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.17559880167534636 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.19454163750874737, "acc,exam_id__2023": 0.16296296296296298, "acc,exam_id__2009": 0.2, "acc,exam_id__2013": 0.17592592592592593, "acc,exam_id__2011": 0.23931623931623933, "acc,exam_id__2014": 0.1834862385321101, "acc,exam_id__2016_2": 0.1951219512195122, "acc,exam_id__2022": 0.22556390977443608, "acc,exam_id__2017": 0.1896551724137931, "acc,exam_id__2016": 0.2066115702479339, "acc,exam_id__2015": 0.17647058823529413, "acc,exam_id__2012": 0.25862068965517243, "acc,exam_id__2010": 0.11965811965811966, "main_score": 0.19454163750874737 }, "harness|bluex|bluex|None|3": { "acc,all": 0.21835883171070933, "acc,exam_id__USP_2018": 0.2222222222222222, "acc,exam_id__USP_2024": 0.04878048780487805, "acc,exam_id__USP_2021": 0.23076923076923078, "acc,exam_id__UNICAMP_2021_1": 0.3695652173913043, "acc,exam_id__USP_2020": 0.19642857142857142, "acc,exam_id__USP_2022": 0.14285714285714285, "acc,exam_id__UNICAMP_2022": 0.28205128205128205, "acc,exam_id__UNICAMP_2020": 0.21818181818181817, "acc,exam_id__USP_2023": 0.1590909090909091, "acc,exam_id__USP_2019": 0.225, "acc,exam_id__UNICAMP_2023": 0.32558139534883723, "acc,exam_id__UNICAMP_2024": 0.28888888888888886, "acc,exam_id__UNICAMP_2021_2": 0.19607843137254902, "acc,exam_id__UNICAMP_2019": 0.16, "acc,exam_id__UNICAMP_2018": 0.2222222222222222, "main_score": 0.21835883171070933 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.24009111617312073, "acc,exam_id__2010-01": 0.25882352941176473, "acc,exam_id__2017-24": 0.25, "acc,exam_id__2016-21": 0.225, "acc,exam_id__2016-19": 0.21794871794871795, "acc,exam_id__2013-12": 0.175, "acc,exam_id__2015-17": 0.24358974358974358, "acc,exam_id__2012-09": 0.22077922077922077, "acc,exam_id__2014-14": 0.2625, "acc,exam_id__2013-10": 0.25, "acc,exam_id__2012-06a": 0.25, "acc,exam_id__2016-20a": 0.325, "acc,exam_id__2018-25": 0.2875, "acc,exam_id__2011-04": 0.3, "acc,exam_id__2011-05": 0.25, "acc,exam_id__2017-22": 0.2375, "acc,exam_id__2014-13": 0.2375, "acc,exam_id__2012-08": 0.2625, "acc,exam_id__2013-11": 0.2125, "acc,exam_id__2011-03": 0.23232323232323232, "acc,exam_id__2012-07": 0.15, "acc,exam_id__2015-16": 0.2125, "acc,exam_id__2014-15": 0.23076923076923078, "acc,exam_id__2010-02": 0.26, "acc,exam_id__2016-20": 0.225, "acc,exam_id__2017-23": 0.2125, "acc,exam_id__2012-06": 0.225, "acc,exam_id__2015-18": 0.2625, "main_score": 0.24009111617312073 }, "harness|assin2_rte|assin2_rte|None|15": { "f1_macro,all": 
0.5097556100727492, "acc,all": 0.5837418300653595, "main_score": 0.5097556100727492 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.04176979032426292, "mse,all": 2.5503267973856203, "main_score": 0.04176979032426292 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.4396551724137931, "acc,all": 0.7846153846153846, "main_score": 0.4396551724137931 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.03209457599403715, "acc,all": 0.12, "main_score": 0.03209457599403715 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.3932038834951456, "acc,all": 0.648, "main_score": 0.3932038834951456 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.3038200710445833, "acc,all": 0.734, "main_score": 0.3038200710445833 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.17559880167534636, "acc,all": 0.35, "main_score": 0.17559880167534636 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 103, "non_truncated": 1326, "padded": 0, "non_padded": 1429, "fewshots_truncated": 106, "mean_seq_length": 1743.6738978306507, "min_seq_length": 1451, "max_seq_length": 2747, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.925822253324003 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 8, "non_truncated": 711, "padded": 0, "non_padded": 719, "fewshots_truncated": 8, "mean_seq_length": 1450.3504867872045, "min_seq_length": 1045, "max_seq_length": 2334, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.988873435326843 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 1, "non_truncated": 2194, "padded": 0, "non_padded": 2195, "fewshots_truncated": 1, "mean_seq_length": 1479.6788154897495, "min_seq_length": 1182, "max_seq_length": 2047, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 2.9995444191343963 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1306.5322712418301, "min_seq_length": 1282, "max_seq_length": 1381, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 0, "non_truncated": 2448, "padded": 0, "non_padded": 2448, "fewshots_truncated": 0, "mean_seq_length": 1539.5322712418301, "min_seq_length": 1515, "max_seq_length": 1614, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 0, "non_truncated": 650, "padded": 0, "non_padded": 650, "fewshots_truncated": 0, "mean_seq_length": 1706.3892307692308, "min_seq_length": 1646, "max_seq_length": 1845, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 15 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1805.678, "min_seq_length": 1782, "max_seq_length": 1838, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1824.428, "min_seq_length": 1798, "max_seq_length": 1865, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1503.284, "min_seq_length": 1485, "max_seq_length": 1540, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 0, "non_truncated": 500, "padded": 0, "non_padded": 500, "fewshots_truncated": 0, "mean_seq_length": 1670.518, 
"min_seq_length": 1652, "max_seq_length": 1706, "max_ctx_length": 2016, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 25 } }
{ "truncated": 112, "non_truncated": 11777, "padded": 0, "non_padded": 11889, "fewshots_truncated": 115 }
{ "start_date": "2024-02-08T16-51-46.875316", "start_time": 1707411107.4503539, "end_time": 1707424163.5701303, "total_evaluation_time_seconds": 13056.11977648735, "has_chat_template": false, "chat_type": null, "n_gpus": 1, "accelerate_num_process": null, "model_sha": "910e5d769c3ebfcbb6577c7793578a7b4ea87680", "model_dtype": "float16", "model_memory_footprint": 78603124832, "model_num_parameters": 39200899072, "model_is_loaded_in_4bit": false, "model_is_loaded_in_8bit": false, "model_is_quantized": null, "model_device": "cuda:1", "batch_size": 1, "max_length": 1024, "max_ctx_length": 992, "max_gen_toks": 32, "model_name": "AI-Sweden-Models/gpt-sw3-40b", "job_id": 103, "model_id": "AI-Sweden-Models/gpt-sw3-40b_eval_request_False_float16_Original", "model_base_model": "", "model_weight_type": "Original", "model_revision": "main", "model_private": false, "model_type": "🟒 : pretrained", "model_architectures": "GPT2LMHeadModel", "submitted_time": "2024-02-05T23:15:47", "lm_eval_model_type": "huggingface", "eval_version": "1.0.0" }
{ "all_grouped_average": 0.28943668316671617, "all_grouped_npm": 0.021122281612247988, "all_grouped": { "enem_challenge": 0.23722883135059483, "bluex": 0.25312934631432543, "oab_exams": 0.24646924829157174, "assin2_rte": 0.5670142345370424, "assin2_sts": 0.03118303336610547, "faquad_nli": 0.43118086176220316, "sparrow_pt": 0.2598512265451701 }, "all": { "harness|enem_challenge|enem_challenge|None|3": 0.23722883135059483, "harness|bluex|bluex|None|3": 0.25312934631432543, "harness|oab_exams|oab_exams|None|3": 0.24646924829157174, "harness|assin2_rte|assin2_rte|None|15": 0.5670142345370424, "harness|assin2_sts|assin2_sts|None|15": 0.03118303336610547, "harness|faquad_nli|faquad_nli|None|15": 0.43118086176220316, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": 0.014116510737830988, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": 0.39246658566221143, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": 0.32415879727507635, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": 0.30866301250556166 }, "harness|enem_challenge|enem_challenge|None|3": { "acc,all": 0.23722883135059483, "acc,exam_id__2023": 0.22962962962962963, "acc,exam_id__2009": 0.1565217391304348, "acc,exam_id__2013": 0.26851851851851855, "acc,exam_id__2011": 0.2564102564102564, "acc,exam_id__2014": 0.23853211009174313, "acc,exam_id__2016_2": 0.2926829268292683, "acc,exam_id__2022": 0.24060150375939848, "acc,exam_id__2017": 0.27586206896551724, "acc,exam_id__2016": 0.21487603305785125, "acc,exam_id__2015": 0.2605042016806723, "acc,exam_id__2012": 0.19827586206896552, "acc,exam_id__2010": 0.21367521367521367, "main_score": 0.23722883135059483 }, "harness|bluex|bluex|None|3": { "acc,all": 0.25312934631432543, "acc,exam_id__USP_2018": 0.2222222222222222, "acc,exam_id__USP_2024": 0.12195121951219512, "acc,exam_id__USP_2021": 0.23076923076923078, "acc,exam_id__UNICAMP_2021_1": 0.41304347826086957, "acc,exam_id__USP_2020": 0.26785714285714285, "acc,exam_id__USP_2022": 0.2857142857142857, "acc,exam_id__UNICAMP_2022": 0.3076923076923077, "acc,exam_id__UNICAMP_2020": 0.3090909090909091, "acc,exam_id__USP_2023": 0.20454545454545456, "acc,exam_id__USP_2019": 0.2, "acc,exam_id__UNICAMP_2023": 0.3023255813953488, "acc,exam_id__UNICAMP_2024": 0.28888888888888886, "acc,exam_id__UNICAMP_2021_2": 0.17647058823529413, "acc,exam_id__UNICAMP_2019": 0.22, "acc,exam_id__UNICAMP_2018": 0.24074074074074073, "main_score": 0.25312934631432543 }, "harness|oab_exams|oab_exams|None|3": { "acc,all": 0.24646924829157174, "acc,exam_id__2010-01": 0.23529411764705882, "acc,exam_id__2017-24": 0.225, "acc,exam_id__2016-21": 0.2625, "acc,exam_id__2016-19": 0.23076923076923078, "acc,exam_id__2013-12": 0.175, "acc,exam_id__2015-17": 0.28205128205128205, "acc,exam_id__2012-09": 0.23376623376623376, "acc,exam_id__2014-14": 0.35, "acc,exam_id__2013-10": 0.2875, "acc,exam_id__2012-06a": 0.3, "acc,exam_id__2016-20a": 0.3, "acc,exam_id__2018-25": 0.2625, "acc,exam_id__2011-04": 0.2375, "acc,exam_id__2011-05": 0.275, "acc,exam_id__2017-22": 0.2125, "acc,exam_id__2014-13": 0.275, "acc,exam_id__2012-08": 0.25, "acc,exam_id__2013-11": 0.1625, "acc,exam_id__2011-03": 0.24242424242424243, "acc,exam_id__2012-07": 0.3, "acc,exam_id__2015-16": 0.2125, "acc,exam_id__2014-15": 0.23076923076923078, "acc,exam_id__2010-02": 0.19, "acc,exam_id__2016-20": 0.2125, "acc,exam_id__2017-23": 0.2375, "acc,exam_id__2012-06": 0.275, "acc,exam_id__2015-18": 0.2125, "main_score": 0.24646924829157174 }, "harness|assin2_rte|assin2_rte|None|15": { 
"f1_macro,all": 0.5670142345370424, "acc,all": 0.6254084967320261, "main_score": 0.5670142345370424 }, "harness|assin2_sts|assin2_sts|None|15": { "pearson,all": 0.03118303336610547, "mse,all": 2.3400939542483665, "main_score": 0.03118303336610547 }, "harness|faquad_nli|faquad_nli|None|15": { "f1_macro,all": 0.43118086176220316, "acc,all": 0.4430769230769231, "main_score": 0.43118086176220316 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "f1_macro,all": 0.014116510737830988, "acc,all": 0.036, "main_score": 0.014116510737830988 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "f1_macro,all": 0.39246658566221143, "acc,all": 0.646, "main_score": 0.39246658566221143 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "f1_macro,all": 0.32415879727507635, "acc,all": 0.412, "main_score": 0.32415879727507635 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "f1_macro,all": 0.30866301250556166, "acc,all": 0.372, "main_score": 0.30866301250556166 } }
{ "harness|enem_challenge|enem_challenge": "LM Harness task", "harness|bluex|bluex": "LM Harness task", "harness|oab_exams|oab_exams": "LM Harness task", "harness|assin2_rte|assin2_rte": "LM Harness task", "harness|assin2_sts|assin2_sts": "LM Harness task", "harness|faquad_nli|faquad_nli": "LM Harness task", "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": "LM Harness task", "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": "LM Harness task", "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": "LM Harness task" }
{ "all": 0, "harness|enem_challenge|enem_challenge": 1, "harness|bluex|bluex": 1, "harness|oab_exams|oab_exams": 1.4, "harness|assin2_rte|assin2_rte": 1, "harness|assin2_sts|assin2_sts": 1, "harness|faquad_nli|faquad_nli": 1, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por": 1, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por": 1, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por": 1, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por": 1 }
{ "harness|enem_challenge|enem_challenge|None|3": { "sample_size": 1429, "truncated": 1429, "non_truncated": 0, "padded": 0, "non_padded": 1429, "fewshots_truncated": 3465, "mean_seq_length": 1743.6738978306507, "min_seq_length": 1451, "max_seq_length": 2747, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 0.5752274317704689 }, "harness|bluex|bluex|None|3": { "sample_size": 719, "truncated": 719, "non_truncated": 0, "padded": 0, "non_padded": 719, "fewshots_truncated": 1356, "mean_seq_length": 1450.3504867872045, "min_seq_length": 1045, "max_seq_length": 2334, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 1.1140472878998608 }, "harness|oab_exams|oab_exams|None|3": { "sample_size": 2195, "truncated": 2195, "non_truncated": 0, "padded": 0, "non_padded": 2195, "fewshots_truncated": 3699, "mean_seq_length": 1479.6788154897495, "min_seq_length": 1182, "max_seq_length": 2047, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 3, "mean_effective_fewshot_size": 1.3148063781321184 }, "harness|assin2_rte|assin2_rte|None|15": { "sample_size": 2448, "truncated": 2448, "non_truncated": 0, "padded": 0, "non_padded": 2448, "fewshots_truncated": 12142, "mean_seq_length": 1306.5322712418301, "min_seq_length": 1282, "max_seq_length": 1381, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 10.040032679738562 }, "harness|assin2_sts|assin2_sts|None|15": { "sample_size": 2448, "truncated": 2448, "non_truncated": 0, "padded": 0, "non_padded": 2448, "fewshots_truncated": 16744, "mean_seq_length": 1539.5322712418301, "min_seq_length": 1515, "max_seq_length": 1614, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 8.160130718954248 }, "harness|faquad_nli|faquad_nli|None|15": { "sample_size": 650, "truncated": 650, "non_truncated": 0, "padded": 0, "non_padded": 650, "fewshots_truncated": 4873, "mean_seq_length": 1706.3892307692308, "min_seq_length": 1646, "max_seq_length": 1845, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 15, "mean_effective_fewshot_size": 7.503076923076923 }, "harness|sparrow_pt|sparrow_emotion-2021-cortiz-por|500|25": { "sample_size": 500, "truncated": 500, "non_truncated": 0, "padded": 0, "non_padded": 500, "fewshots_truncated": 6639, "mean_seq_length": 1805.678, "min_seq_length": 1782, "max_seq_length": 1838, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 11.722 }, "harness|sparrow_pt|sparrow_hate-2019-fortuna-por|500|25": { "sample_size": 500, "truncated": 500, "non_truncated": 0, "padded": 0, "non_padded": 500, "fewshots_truncated": 6200, "mean_seq_length": 1824.428, "min_seq_length": 1798, "max_seq_length": 1865, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 12.6 }, "harness|sparrow_pt|sparrow_sentiment-2016-mozetic-por|500|25": { "sample_size": 500, "truncated": 500, "non_truncated": 0, "padded": 0, "non_padded": 500, "fewshots_truncated": 5015, "mean_seq_length": 1503.284, "min_seq_length": 1485, "max_seq_length": 1540, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 14.97 }, "harness|sparrow_pt|sparrow_sentiment-2018-brum-por|500|25": { "sample_size": 500, "truncated": 500, "non_truncated": 0, "padded": 0, "non_padded": 
500, "fewshots_truncated": 6000, "mean_seq_length": 1670.518, "min_seq_length": 1652, "max_seq_length": 1706, "max_ctx_length": 992, "max_gen_toks": 32, "mean_original_fewshots_size": 25, "mean_effective_fewshot_size": 13 } }
{ "truncated": 11889, "non_truncated": 0, "padded": 0, "non_padded": 11889, "fewshots_truncated": 66133 }
End of preview.
