Upload eval_results/meta-llama/Meta-Llama-3-70B-Instruct/main/ifeval/results_2024-04-21T22-19-20.627905.json with huggingface_hub
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 4,
"max_samples": null,
"job_id": "",
"start_time": 3878304.964984352,
"end_time": 3887782.490878361,
"total_evaluation_time_secondes": "9477.525894009043",
"model_name": "meta-llama/Meta-Llama-3-70B-Instruct",
"model_sha": "5fcb2901844dde3111159f24205b71c25900ffbd",
"model_dtype": "torch.bfloat16",
"model_size": "131.73 GB",
"config": null
},
"results": {
"extended|ifeval|0": {
"prompt_level_strict_acc": 0.3955637707948244,
"prompt_level_strict_acc_stderr": 0.02104198070302088,
"inst_level_strict_acc": 0.5359712230215827,
"inst_level_strict_acc_stderr": 0.0005385092733933141,
"prompt_level_loose_acc": 0.40850277264325324,
"prompt_level_loose_acc_stderr": 0.021153244098720363,
"inst_level_loose_acc": 0.5479616306954437,
"inst_level_loose_acc_stderr": 0.000532906634394331
},
"all": {
"prompt_level_strict_acc": 0.3955637707948244,
"prompt_level_strict_acc_stderr": 0.02104198070302088,
"inst_level_strict_acc": 0.5359712230215827,
"inst_level_strict_acc_stderr": 0.0005385092733933141,
"prompt_level_loose_acc": 0.40850277264325324,
"prompt_level_loose_acc_stderr": 0.021153244098720363,
"inst_level_loose_acc": 0.5479616306954437,
"inst_level_loose_acc_stderr": 0.000532906634394331
}
},
"versions": {
"extended|ifeval|0": 0
},
"config_tasks": {
"extended|ifeval": {
"name": "ifeval",
"prompt_function": "ifeval_prompt",
"hf_repo": "wis-k/instruction-following-eval",
"hf_subset": "default",
"metric": [
"ifeval_metric"
],
"hf_avail_splits": [
"train"
],
"evaluation_splits": [
"train"
],
"few_shots_split": "train",
"few_shots_select": "random_sampling",
"generation_size": 1280,
"stop_sequence": [],
"output_regex": null,
"frozen": false,
"suite": [
"extended"
],
"original_num_docs": 541,
"effective_num_docs": 541,
"trust_dataset": null,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"extended|ifeval|0": {
"hashes": {
"hash_examples": "e99cbf567588d7c6",
"hash_full_prompts": "30fe3a1c6acd1de0",
"hash_input_tokens": "1d6107f4cf33c68a",
"hash_cont_tokens": "834938d3871738d2"
},
"truncated": 541,
"non_truncated": 0,
"padded": 116,
"non_padded": 425,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "ea046ab2c6fc5928",
"hash_full_prompts": "bcd0aa2470bb7894",
"hash_input_tokens": "7d90b44b7e9fb16b",
"hash_cont_tokens": "b54560f50689ccb8"
},
"truncated": 541,
"non_truncated": 0,
"padded": 116,
"non_padded": 425,
"num_truncated_few_shots": 0
}
}
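
The block below is a minimal sketch, not part of the original results file, showing how the headline IFEval metrics in a lighteval results JSON like the one above could be read with standard-library Python. The file path is taken from the upload message and is assumed to exist locally; adjust it to wherever the JSON is stored.

import json

# Path copied from the upload message above; assumed to be available locally.
path = "eval_results/meta-llama/Meta-Llama-3-70B-Instruct/main/ifeval/results_2024-04-21T22-19-20.627905.json"

with open(path) as f:
    results = json.load(f)

# "extended|ifeval|0" holds the per-task scores; "all" aggregates them
# (identical here because IFEval is the only task in this run).
ifeval = results["results"]["extended|ifeval|0"]
for metric in (
    "prompt_level_strict_acc",
    "inst_level_strict_acc",
    "prompt_level_loose_acc",
    "inst_level_loose_acc",
):
    stderr = ifeval[f"{metric}_stderr"]
    print(f"{metric}: {ifeval[metric]:.4f} ± {stderr:.4f}")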