open-r1-eval-leaderboard/eval_results/HuggingFaceH4/mistral-7b-kto/v1.0/bbh/results_2024-03-26T09-30-21.406885.json
{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 1526145.70482935,
        "end_time": 1526414.705946317,
        "total_evaluation_time_secondes": "269.0011169668287",
        "model_name": "HuggingFaceH4/mistral-7b-kto",
        "model_sha": "ba517352d0ccbfbbb40cb6c11b66286784ea857a",
        "model_dtype": "torch.bfloat16",
        "model_size": "13.99 GB",
        "config": null
    },
    "results": {
        "harness|bbh:causal_judgment|3": {
            "em": 0.5508021390374331,
            "em_stderr": 0.0364720501817238,
            "qem": 0.5508021390374331,
            "qem_stderr": 0.0364720501817238,
            "pem": 0.5668449197860963,
            "pem_stderr": 0.03633267411102586,
            "pqem": 0.5668449197860963,
            "pqem_stderr": 0.03633267411102586,
            "perfect_em": 0.5508021390374331,
            "perfect_em_stderr": 0.0364720501817238
        },
        "harness|bbh:date_understanding|3": {
            "em": 0.48,
            "em_stderr": 0.031660853408495185,
            "qem": 0.48,
            "qem_stderr": 0.031660853408495185,
            "pem": 0.48,
            "pem_stderr": 0.031660853408495185,
            "pqem": 0.568,
            "pqem_stderr": 0.03139181076542941,
            "perfect_em": 0.48,
            "perfect_em_stderr": 0.031660853408495185
        },
        "harness|bbh:disambiguation_qa|3": {
            "em": 0.22,
            "em_stderr": 0.02625179282460584,
            "qem": 0.22,
            "qem_stderr": 0.02625179282460584,
            "pem": 0.58,
            "pem_stderr": 0.03127799950463661,
            "pqem": 0.716,
            "pqem_stderr": 0.028576958730437408,
            "perfect_em": 0.22,
            "perfect_em_stderr": 0.02625179282460584
        },
        "harness|bbh:geometric_shapes|3": {
            "em": 0.188,
            "em_stderr": 0.024760377727750502,
            "qem": 0.188,
            "qem_stderr": 0.024760377727750502,
            "pem": 0.188,
            "pem_stderr": 0.024760377727750502,
            "pqem": 0.188,
            "pqem_stderr": 0.024760377727750502,
            "perfect_em": 0.188,
            "perfect_em_stderr": 0.024760377727750502
        },
        "harness|bbh:logical_deduction_five_objects|3": {
            "em": 0.304,
            "em_stderr": 0.029150213374159673,
            "qem": 0.304,
            "qem_stderr": 0.029150213374159673,
            "pem": 0.324,
            "pem_stderr": 0.02965829492454557,
            "pqem": 0.476,
            "pqem_stderr": 0.03164968895968782,
            "perfect_em": 0.304,
            "perfect_em_stderr": 0.029150213374159673
        },
        "harness|bbh:logical_deduction_seven_objects|3": {
            "em": 0.28,
            "em_stderr": 0.02845414827783232,
            "qem": 0.28,
            "qem_stderr": 0.02845414827783232,
            "pem": 0.292,
            "pem_stderr": 0.02881432040220564,
            "pqem": 0.42,
            "pqem_stderr": 0.03127799950463661,
            "perfect_em": 0.28,
            "perfect_em_stderr": 0.02845414827783232
        },
        "harness|bbh:logical_deduction_three_objects|3": {
            "em": 0.472,
            "em_stderr": 0.0316364895315444,
            "qem": 0.472,
            "qem_stderr": 0.0316364895315444,
            "pem": 0.48,
            "pem_stderr": 0.031660853408495185,
            "pqem": 0.752,
            "pqem_stderr": 0.027367497504863555,
            "perfect_em": 0.472,
            "perfect_em_stderr": 0.0316364895315444
        },
        "harness|bbh:movie_recommendation|3": {
            "em": 0.3895582329317269,
            "em_stderr": 0.030965812059094804,
            "qem": 0.3895582329317269,
            "qem_stderr": 0.030965812059094804,
            "pem": 0.4819277108433735,
            "pem_stderr": 0.03172928536632067,
            "pqem": 0.606425702811245,
            "pqem_stderr": 0.031022466480549557,
            "perfect_em": 0.3895582329317269,
            "perfect_em_stderr": 0.030965812059094804
        },
        "harness|bbh:navigate|3": {
            "em": 0.604,
            "em_stderr": 0.030993197854577846,
            "qem": 0.604,
            "qem_stderr": 0.030993197854577846,
            "pem": 0.604,
            "pem_stderr": 0.030993197854577846,
            "pqem": 0.604,
            "pqem_stderr": 0.030993197854577846,
            "perfect_em": 0.604,
            "perfect_em_stderr": 0.030993197854577846
        },
        "harness|bbh:reasoning_about_colored_objects|3": {
            "em": 0.356,
            "em_stderr": 0.03034368065715321,
            "qem": 0.356,
            "qem_stderr": 0.03034368065715321,
            "pem": 0.356,
            "pem_stderr": 0.03034368065715321,
            "pqem": 0.516,
            "pqem_stderr": 0.03166998503010742,
            "perfect_em": 0.356,
            "perfect_em_stderr": 0.03034368065715321
        },
        "harness|bbh:ruin_names|3": {
            "em": 0.2620967741935484,
            "em_stderr": 0.02798221349117912,
            "qem": 0.2620967741935484,
            "qem_stderr": 0.02798221349117912,
            "pem": 0.2701612903225806,
            "pem_stderr": 0.02825377818838202,
            "pqem": 0.46774193548387094,
            "pqem_stderr": 0.03174795841398569,
            "perfect_em": 0.2620967741935484,
            "perfect_em_stderr": 0.02798221349117912
        },
        "harness|bbh:salient_translation_error_detection|3": {
            "em": 0.268,
            "em_stderr": 0.02806876238252669,
            "qem": 0.268,
            "qem_stderr": 0.02806876238252669,
            "pem": 0.268,
            "pem_stderr": 0.02806876238252669,
            "pqem": 0.408,
            "pqem_stderr": 0.031145209846548495,
            "perfect_em": 0.268,
            "perfect_em_stderr": 0.02806876238252669
        },
        "harness|bbh:snarks|3": {
            "em": 0.3707865168539326,
            "em_stderr": 0.03630566062020186,
            "qem": 0.3707865168539326,
            "qem_stderr": 0.03630566062020186,
            "pem": 0.46629213483146065,
            "pem_stderr": 0.03749680060368987,
            "pqem": 0.6685393258426966,
            "pqem_stderr": 0.03538285323537675,
            "perfect_em": 0.3707865168539326,
            "perfect_em_stderr": 0.03630566062020186
        },
        "harness|bbh:sports_understanding|3": {
            "em": 0.316,
            "em_stderr": 0.02946265759857866,
            "qem": 0.316,
            "qem_stderr": 0.02946265759857866,
            "pem": 0.788,
            "pem_stderr": 0.02590188469054116,
            "pqem": 0.788,
            "pqem_stderr": 0.02590188469054116,
            "perfect_em": 0.316,
            "perfect_em_stderr": 0.02946265759857866
        },
        "harness|bbh:temporal_sequences|3": {
            "em": 0.136,
            "em_stderr": 0.021723342617052062,
            "qem": 0.136,
            "qem_stderr": 0.021723342617052062,
            "pem": 0.136,
            "pem_stderr": 0.021723342617052062,
            "pqem": 0.416,
            "pqem_stderr": 0.03123585623701457,
            "perfect_em": 0.136,
            "perfect_em_stderr": 0.021723342617052062
        },
        "harness|bbh:tracking_shuffled_objects_five_objects|3": {
            "em": 0.176,
            "em_stderr": 0.02413349752545712,
            "qem": 0.176,
            "qem_stderr": 0.02413349752545712,
            "pem": 0.18,
            "pem_stderr": 0.024346890650293537,
            "pqem": 0.38,
            "pqem_stderr": 0.03076011604262603,
            "perfect_em": 0.176,
            "perfect_em_stderr": 0.02413349752545712
        },
        "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
            "em": 0.124,
            "em_stderr": 0.02088638225867326,
            "qem": 0.124,
            "qem_stderr": 0.02088638225867326,
            "pem": 0.124,
            "pem_stderr": 0.02088638225867326,
            "pqem": 0.268,
            "pqem_stderr": 0.028068762382526695,
            "perfect_em": 0.124,
            "perfect_em_stderr": 0.02088638225867326
        },
        "harness|bbh:tracking_shuffled_objects_three_objects|3": {
            "em": 0.316,
            "em_stderr": 0.029462657598578683,
            "qem": 0.316,
            "qem_stderr": 0.029462657598578683,
            "pem": 0.32,
            "pem_stderr": 0.029561724955241044,
            "pqem": 0.636,
            "pqem_stderr": 0.030491555220405555,
            "perfect_em": 0.316,
            "perfect_em_stderr": 0.029462657598578683
        },
        "harness|bbh:_average|3": {
            "em": 0.32295798127870223,
            "em_stderr": 0.028817432777176948,
            "qem": 0.32295798127870223,
            "qem_stderr": 0.028817432777176948,
            "pem": 0.38362366976575063,
            "pem_stderr": 0.029081727983978106,
            "pqem": 0.5247528824402172,
            "pqem_stderr": 0.030543158485449497,
            "perfect_em": 0.32295798127870223,
            "perfect_em_stderr": 0.028817432777176948
        }
    },
    "versions": {
        "harness|bbh:causal_judgment|3": 0,
        "harness|bbh:date_understanding|3": 0,
        "harness|bbh:disambiguation_qa|3": 0,
        "harness|bbh:geometric_shapes|3": 0,
        "harness|bbh:logical_deduction_five_objects|3": 0,
        "harness|bbh:logical_deduction_seven_objects|3": 0,
        "harness|bbh:logical_deduction_three_objects|3": 0,
        "harness|bbh:movie_recommendation|3": 0,
        "harness|bbh:navigate|3": 0,
        "harness|bbh:reasoning_about_colored_objects|3": 0,
        "harness|bbh:ruin_names|3": 0,
        "harness|bbh:salient_translation_error_detection|3": 0,
        "harness|bbh:snarks|3": 0,
        "harness|bbh:sports_understanding|3": 0,
        "harness|bbh:temporal_sequences|3": 0,
        "harness|bbh:tracking_shuffled_objects_five_objects|3": 0,
        "harness|bbh:tracking_shuffled_objects_seven_objects|3": 0,
        "harness|bbh:tracking_shuffled_objects_three_objects|3": 0
    },
    "config_tasks": {
        "harness|bbh:causal_judgment": {
            "name": "bbh:causal_judgment",
            "prompt_function": "bbh_causal_judgment",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "causal_judgement",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 187,
            "effective_num_docs": 187,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:date_understanding": {
            "name": "bbh:date_understanding",
            "prompt_function": "bbh_date_understanding",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "date_understanding",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:disambiguation_qa": {
            "name": "bbh:disambiguation_qa",
            "prompt_function": "bbh_disambiguation_qa",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "disambiguation_qa",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:geometric_shapes": {
            "name": "bbh:geometric_shapes",
            "prompt_function": "bbh_geometric_shapes",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "geometric_shapes",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:logical_deduction_five_objects": {
            "name": "bbh:logical_deduction_five_objects",
            "prompt_function": "bbh_logical_deduction_five_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "logical_deduction_five_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:logical_deduction_seven_objects": {
            "name": "bbh:logical_deduction_seven_objects",
            "prompt_function": "bbh_logical_deduction_seven_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "logical_deduction_seven_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:logical_deduction_three_objects": {
            "name": "bbh:logical_deduction_three_objects",
            "prompt_function": "bbh_logical_deduction_three_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "logical_deduction_three_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:movie_recommendation": {
            "name": "bbh:movie_recommendation",
            "prompt_function": "bbh_movie_recommendation",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "movie_recommendation",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 249,
            "effective_num_docs": 249,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:navigate": {
            "name": "bbh:navigate",
            "prompt_function": "bbh_navigate",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "navigate",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:reasoning_about_colored_objects": {
            "name": "bbh:reasoning_about_colored_objects",
            "prompt_function": "bbh_reasoning_about_colored_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "reasoning_about_colored_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:ruin_names": {
            "name": "bbh:ruin_names",
            "prompt_function": "bbh_ruin_names",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "ruin_names",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 248,
            "effective_num_docs": 248,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:salient_translation_error_detection": {
            "name": "bbh:salient_translation_error_detection",
            "prompt_function": "bbh_salient_translation_error_detection",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "salient_translation_error_detection",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:snarks": {
            "name": "bbh:snarks",
            "prompt_function": "bbh_snarks",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "snarks",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 178,
            "effective_num_docs": 178,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:sports_understanding": {
            "name": "bbh:sports_understanding",
            "prompt_function": "bbh_sports_understanding",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "sports_understanding",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:temporal_sequences": {
            "name": "bbh:temporal_sequences",
            "prompt_function": "bbh_temporal_sequences",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "temporal_sequences",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:tracking_shuffled_objects_five_objects": {
            "name": "bbh:tracking_shuffled_objects_five_objects",
            "prompt_function": "bbh_tracking_shuffled_objects_five_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "tracking_shuffled_objects_five_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:tracking_shuffled_objects_seven_objects": {
            "name": "bbh:tracking_shuffled_objects_seven_objects",
            "prompt_function": "bbh_tracking_shuffled_objects_seven_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "tracking_shuffled_objects_seven_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:tracking_shuffled_objects_three_objects": {
            "name": "bbh:tracking_shuffled_objects_three_objects",
            "prompt_function": "bbh_tracking_shuffled_objects_three_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "tracking_shuffled_objects_three_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        }
    },
    "summary_tasks": {
        "harness|bbh:causal_judgment|3": {
            "hashes": {
                "hash_examples": "63218f5ae055ab2b",
                "hash_full_prompts": "fa8168f39a475fb0",
                "hash_input_tokens": "787f75e06fd43c0d",
                "hash_cont_tokens": "a9897ca7ae85c12e"
            },
            "truncated": 187,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 187,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:date_understanding|3": {
            "hashes": {
                "hash_examples": "f145c7a06def3c8e",
                "hash_full_prompts": "2cceeea606638d49",
                "hash_input_tokens": "10c13d6fb8af7c22",
                "hash_cont_tokens": "cabbca449b68ce86"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:disambiguation_qa|3": {
            "hashes": {
                "hash_examples": "19677fd1773f7eb9",
                "hash_full_prompts": "d8f1ba70c22ae578",
                "hash_input_tokens": "c21a88707f480cab",
                "hash_cont_tokens": "7d3f1881dc220111"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:geometric_shapes|3": {
            "hashes": {
                "hash_examples": "76c7b11a13cc72a9",
                "hash_full_prompts": "52a60ed1d0113b8b",
                "hash_input_tokens": "10e113b2cf3fa584",
                "hash_cont_tokens": "f8197622419ea005"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:logical_deduction_five_objects|3": {
            "hashes": {
                "hash_examples": "0e958c856332a745",
                "hash_full_prompts": "253aa9791c941909",
                "hash_input_tokens": "0bc166cab0aed76a",
                "hash_cont_tokens": "181ad1ad6ac183d1"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:logical_deduction_seven_objects|3": {
            "hashes": {
                "hash_examples": "ab9de25a5eb40d09",
                "hash_full_prompts": "aa6117f601cd268e",
                "hash_input_tokens": "ab99c78b48e3a0bb",
                "hash_cont_tokens": "7f2da1aa476b95fa"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:logical_deduction_three_objects|3": {
            "hashes": {
                "hash_examples": "3c6bf52517714218",
                "hash_full_prompts": "1892b050bc7848a4",
                "hash_input_tokens": "a720b56aa7c52551",
                "hash_cont_tokens": "52a6f1161a9a264a"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:movie_recommendation|3": {
            "hashes": {
                "hash_examples": "2d9dc4975935d31a",
                "hash_full_prompts": "8e00606ed3407167",
                "hash_input_tokens": "c825ab1c99245a17",
                "hash_cont_tokens": "e16524bf716d503d"
            },
            "truncated": 249,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 249,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:navigate|3": {
            "hashes": {
                "hash_examples": "ba91dcdb9a064255",
                "hash_full_prompts": "8d50c5baf1df7aef",
                "hash_input_tokens": "f234e6b28ea1fa49",
                "hash_cont_tokens": "8efb02fb07bb7bba"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:reasoning_about_colored_objects|3": {
            "hashes": {
                "hash_examples": "a6ba328c4c3385d2",
                "hash_full_prompts": "3d2441a21c12a960",
                "hash_input_tokens": "f3b577892955aa84",
                "hash_cont_tokens": "9d5dfa1f34a04d9c"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:ruin_names|3": {
            "hashes": {
                "hash_examples": "2ef28d5f2d4fdd25",
                "hash_full_prompts": "ba95caa786f313b1",
                "hash_input_tokens": "9954b30d4205604a",
                "hash_cont_tokens": "8c6615983accdd0e"
            },
            "truncated": 248,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 248,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:salient_translation_error_detection|3": {
            "hashes": {
                "hash_examples": "c13f25ec8ffed496",
                "hash_full_prompts": "a8512d174e1cab8f",
                "hash_input_tokens": "3e738df24b7eddf8",
                "hash_cont_tokens": "6f387929f2f41a99"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:snarks|3": {
            "hashes": {
                "hash_examples": "5f6db7bff7f6f22e",
                "hash_full_prompts": "ff91d81466b9041f",
                "hash_input_tokens": "21388b09e13d0208",
                "hash_cont_tokens": "896c8e1a238b7eb4"
            },
            "truncated": 178,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 178,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:sports_understanding|3": {
            "hashes": {
                "hash_examples": "042afbe5d9c1f02d",
                "hash_full_prompts": "a59324d9eb37e0f5",
                "hash_input_tokens": "0ad41bb8d2290a5b",
                "hash_cont_tokens": "d96237b2ab880bb1"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:temporal_sequences|3": {
            "hashes": {
                "hash_examples": "803a05f352eb6afc",
                "hash_full_prompts": "1b3971192bf481e7",
                "hash_input_tokens": "3051b60940ccceab",
                "hash_cont_tokens": "15933169b4c72b92"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:tracking_shuffled_objects_five_objects|3": {
            "hashes": {
                "hash_examples": "2bbac6db7ab0d527",
                "hash_full_prompts": "7ef4567d2fcf5094",
                "hash_input_tokens": "b841310ee5531238",
                "hash_cont_tokens": "edde026ca7e7988f"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
            "hashes": {
                "hash_examples": "845caf093ac2b58c",
                "hash_full_prompts": "196a0f8712857624",
                "hash_input_tokens": "3e738df24b7eddf8",
                "hash_cont_tokens": "9a02c83e701bca54"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:tracking_shuffled_objects_three_objects|3": {
            "hashes": {
                "hash_examples": "9004f14d5a32b9a8",
                "hash_full_prompts": "592a03f0518f17b6",
                "hash_input_tokens": "19e0ef1dd5ae9d33",
                "hash_cont_tokens": "3c5aafea9626475c"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "4ff1e3dc5703575d",
            "hash_full_prompts": "0d80ce968d89d4ef",
            "hash_input_tokens": "72bda1e7aeb34786",
            "hash_cont_tokens": "1eec0370c759faa6"
        },
        "truncated": 4362,
        "non_truncated": 0,
        "padded": 0,
        "non_padded": 4362,
        "num_truncated_few_shots": 0
    }
} |
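
A minimal usage sketch, not part of the results file itself: the snippet below loads this JSON with Python's standard library and prints each BBH subtask's scores next to the macro-average the harness already computed. The filename is taken from the path above; the key layout ("results", per-task "em"/"pqem" fields, and the "harness|bbh:_average|3" entry) is exactly what the file shows.

import json

# Load the lighteval results file (filename from the path above).
with open("results_2024-03-26T09-30-21.406885.json") as f:
    data = json.load(f)

# "em" is exact match; "pqem" (prefix quasi-exact match) is the most
# lenient of the five metrics reported per task in this file.
for task, scores in sorted(data["results"].items()):
    if task == "harness|bbh:_average|3":
        continue  # the harness-computed macro-average is printed below
    print(f"{task:60s} em={scores['em']:.3f}  pqem={scores['pqem']:.3f}")

avg = data["results"]["harness|bbh:_average|3"]
print(f"\n{'BBH macro-average':60s} em={avg['em']:.3f}  pqem={avg['pqem']:.3f}")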