{
  "results": {
    "medqa_4options": {
      "acc,none": 0.24901806755695208,
      "acc_stderr,none": 0.012125135984037815,
      "acc_norm,none": 0.24901806755695208,
      "acc_norm_stderr,none": 0.012125135984037815,
      "alias": "medqa_4options"
    }
  },
  "configs": {
    "medqa_4options": {
      "task": "medqa_4options",
      "dataset_path": "GBaker/MedQA-USMLE-4-options-hf",
      "training_split": "train",
      "validation_split": "validation",
      "test_split": "test",
      "doc_to_text": "def doc_to_text(doc) -> str:\n    option_choices = {'A': doc[\"ending0\"], 'B': doc[\"ending1\"], 'C': doc[\"ending2\"], 'D': doc[\"ending3\"]}\n    answers = \"\".join((f\"{k}. {v}\\n\") for k, v in option_choices.items())\n    return f\"Question: {doc['sent1']}\\n{answers}Answer:\"\n",
      "doc_to_target": "def doc_to_target(doc) -> int:\n    return doc[\"label\"]\n",
      "doc_to_choice": [
        "A",
        "B",
        "C",
        "D"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": false
    }
  },
  "versions": {
    "medqa_4options": "Yaml"
  },
  "n-shot": {
    "medqa_4options": 0
  },
  "config": {
    "model": "hf",
    "model_args": "pretrained=SmerkyG/rwkv6-world-1b6,dtype=bfloat16,trust_remote_code=True",
    "batch_size": "auto",
    "batch_sizes": [
      32
    ],
    "device": null,
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  },
  "git_hash": "bff08d1"
}
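
For reference, a minimal Python sketch (not part of the results file above) showing how the doc_to_text function embedded in the config renders a MedQA document into the zero-shot prompt that the model is scored on. The sample document is hypothetical, made up for illustration; real documents come from the test split of the GBaker/MedQA-USMLE-4-options-hf dataset named in the config.

```python
def doc_to_text(doc) -> str:
    # Reconstructed verbatim from the "doc_to_text" string in the config above.
    option_choices = {'A': doc["ending0"], 'B': doc["ending1"],
                      'C': doc["ending2"], 'D': doc["ending3"]}
    answers = "".join((f"{k}. {v}\n") for k, v in option_choices.items())
    return f"Question: {doc['sent1']}\n{answers}Answer:"

# Hypothetical sample document, for illustration only.
sample_doc = {
    "sent1": "Which vitamin deficiency causes scurvy?",
    "ending0": "Vitamin A",
    "ending1": "Vitamin B12",
    "ending2": "Vitamin C",
    "ending3": "Vitamin D",
    "label": 2,  # index into doc_to_choice ("A".."D"), per doc_to_target
}

print(doc_to_text(sample_doc))
# Question: Which vitamin deficiency causes scurvy?
# A. Vitamin A
# B. Vitamin B12
# C. Vitamin C
# D. Vitamin D
# Answer:
```

Per the config, the harness appends the target_delimiter (a single space) after "Answer:" and compares the model's likelihood of each choice letter "A" through "D"; acc of roughly 0.249 on a 4-option task is at chance level.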