{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 4,
"max_samples": null,
"job_id": "",
"start_time": 1250990.191459896,
"end_time": 1251691.434393512,
"total_evaluation_time_secondes": "701.2429336160421",
"model_name": "NousResearch/Hermes-2-Pro-Mistral-7B",
"model_sha": "4d50786bb33ce7c31697932a0ebe01fd8a373a4a",
"model_dtype": "torch.bfloat16",
"model_size": "13.99 GB",
"config": null
},
"results": {
"extended|ifeval|0": {
"prompt_level_strict_acc": 0.5083179297597042,
"prompt_level_strict_acc_stderr": 0.021513596564021183,
"inst_level_strict_acc": 0.6247002398081535,
"inst_level_strict_acc_stderr": 0.000523733511365831,
"prompt_level_loose_acc": 0.5415896487985212,
"prompt_level_loose_acc_stderr": 0.021442010560476534,
"inst_level_loose_acc": 0.6606714628297362,
"inst_level_loose_acc_stderr": 0.0005031302430479284
},
"all": {
"prompt_level_strict_acc": 0.5083179297597042,
"prompt_level_strict_acc_stderr": 0.021513596564021183,
"inst_level_strict_acc": 0.6247002398081535,
"inst_level_strict_acc_stderr": 0.000523733511365831,
"prompt_level_loose_acc": 0.5415896487985212,
"prompt_level_loose_acc_stderr": 0.021442010560476534,
"inst_level_loose_acc": 0.6606714628297362,
"inst_level_loose_acc_stderr": 0.0005031302430479284
}
},
"versions": {
"extended|ifeval|0": 0
},
"config_tasks": {
"extended|ifeval": {
"name": "ifeval",
"prompt_function": "ifeval_prompt",
"hf_repo": "wis-k/instruction-following-eval",
"hf_subset": "default",
"metric": [
"ifeval_metric"
],
"hf_avail_splits": [
"train"
],
"evaluation_splits": [
"train"
],
"few_shots_split": "train",
"few_shots_select": "random_sampling",
"generation_size": 1280,
"stop_sequence": [],
"output_regex": null,
"frozen": false,
"suite": [
"extended"
],
"original_num_docs": 541,
"effective_num_docs": 541,
"trust_dataset": null,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"extended|ifeval|0": {
"hashes": {
"hash_examples": "e99cbf567588d7c6",
"hash_full_prompts": "7ea7bf2a8edba8f4",
"hash_input_tokens": "cbf56a3804371c84",
"hash_cont_tokens": "0dcd3591fb4d2c9a"
},
"truncated": 541,
"non_truncated": 0,
"padded": 123,
"non_padded": 418,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "ea046ab2c6fc5928",
"hash_full_prompts": "45f8422f6ad2da79",
"hash_input_tokens": "e8f96928ae2b5d8c",
"hash_cont_tokens": "8d5c07043ba70e59"
},
"truncated": 541,
"non_truncated": 0,
"padded": 123,
"non_padded": 418,
"num_truncated_few_shots": 0
}
}