Upload data/mistralai/Mistral-7B-Instruct-v0.2/base/24-01-28-20:06:10.json with huggingface_hub
{
"results": {
"assumenda-debitis_logiqa_base": {
"acc,none": 0.3003194888178914,
"acc_stderr,none": 0.018335874932123606,
"alias": "assumenda-debitis_logiqa_base"
},
"assumenda-debitis_lsat-lr_base": {
"acc,none": 0.2823529411764706,
"acc_stderr,none": 0.019952288758197836,
"alias": "assumenda-debitis_lsat-lr_base"
},
"ipsum-aliquam_logiqa_base": {
"acc,none": 0.3019169329073482,
"acc_stderr,none": 0.01836357692961452,
"alias": "ipsum-aliquam_logiqa_base"
},
"ipsum-aliquam_lsat-lr_base": {
"acc,none": 0.2901960784313726,
"acc_stderr,none": 0.020116669259866334,
"alias": "ipsum-aliquam_lsat-lr_base"
},
"quia-provident_logiqa_base": {
"acc,none": 0.3035143769968051,
"acc_stderr,none": 0.01839101519560228,
"alias": "quia-provident_logiqa_base"
},
"quia-provident_lsat-lr_base": {
"acc,none": 0.28823529411764703,
"acc_stderr,none": 0.020076264678059525,
"alias": "quia-provident_lsat-lr_base"
},
"quos-recusandae_logiqa_base": {
"acc,none": 0.28913738019169327,
"acc_stderr,none": 0.018134473494097372,
"alias": "quos-recusandae_logiqa_base"
},
"quos-recusandae_lsat-lr_base": {
"acc,none": 0.26666666666666666,
"acc_stderr,none": 0.019600907899442414,
"alias": "quos-recusandae_lsat-lr_base"
},
"ratione-quaerat_logiqa_base": {
"acc,none": 0.2939297124600639,
"acc_stderr,none": 0.01822240539964836,
"alias": "ratione-quaerat_logiqa_base"
},
"ratione-quaerat_lsat-lr_base": {
"acc,none": 0.27450980392156865,
"acc_stderr,none": 0.01978043383787032,
"alias": "ratione-quaerat_lsat-lr_base"
},
"voluptates-expedita_logiqa_base": {
"acc,none": 0.30990415335463256,
"acc_stderr,none": 0.018498154246872536,
"alias": "voluptates-expedita_logiqa_base"
},
"voluptates-expedita_lsat-lr_base": {
"acc,none": 0.29215686274509806,
"acc_stderr,none": 0.02015661811960828,
"alias": "voluptates-expedita_lsat-lr_base"
}
},
"configs": {
"assumenda-debitis_logiqa_base": {
"task": "assumenda-debitis_logiqa_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "assumenda-debitis-logiqa/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1fe9fc0>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"assumenda-debitis_lsat-lr_base": {
"task": "assumenda-debitis_lsat-lr_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "assumenda-debitis-lsat-lr/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e21b40>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"ipsum-aliquam_logiqa_base": {
"task": "ipsum-aliquam_logiqa_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "ipsum-aliquam-logiqa/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e239a0>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"ipsum-aliquam_lsat-lr_base": {
"task": "ipsum-aliquam_lsat-lr_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "ipsum-aliquam-lsat-lr/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e22b00>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"quia-provident_logiqa_base": {
"task": "quia-provident_logiqa_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "quia-provident-logiqa/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e21a20>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"quia-provident_lsat-lr_base": {
"task": "quia-provident_lsat-lr_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "quia-provident-lsat-lr/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e22f80>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"quos-recusandae_logiqa_base": {
"task": "quos-recusandae_logiqa_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "quos-recusandae-logiqa/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e23ac0>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"quos-recusandae_lsat-lr_base": {
"task": "quos-recusandae_lsat-lr_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "quos-recusandae-lsat-lr/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e5a5f0>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"ratione-quaerat_logiqa_base": {
"task": "ratione-quaerat_logiqa_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "ratione-quaerat-logiqa/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e22560>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"ratione-quaerat_lsat-lr_base": {
"task": "ratione-quaerat_lsat-lr_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "ratione-quaerat-lsat-lr/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e23a30>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"voluptates-expedita_logiqa_base": {
"task": "voluptates-expedita_logiqa_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "voluptates-expedita-logiqa/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e58b80>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
},
"voluptates-expedita_lsat-lr_base": {
"task": "voluptates-expedita_lsat-lr_base",
"group": "logikon-bench",
"dataset_path": "logikon/cot-eval-traces",
"dataset_kwargs": {
"data_files": {
"test": "voluptates-expedita-lsat-lr/test-00000-of-00001.parquet"
}
},
"test_split": "test",
"doc_to_text": "<function doc_to_text at 0x7f33c1e58d30>",
"doc_to_target": "{{answer}}",
"doc_to_choice": "{{options}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 0.0
}
}
},
"versions": {
"assumenda-debitis_logiqa_base": 0.0,
"assumenda-debitis_lsat-lr_base": 0.0,
"ipsum-aliquam_logiqa_base": 0.0,
"ipsum-aliquam_lsat-lr_base": 0.0,
"quia-provident_logiqa_base": 0.0,
"quia-provident_lsat-lr_base": 0.0,
"quos-recusandae_logiqa_base": 0.0,
"quos-recusandae_lsat-lr_base": 0.0,
"ratione-quaerat_logiqa_base": 0.0,
"ratione-quaerat_lsat-lr_base": 0.0,
"voluptates-expedita_logiqa_base": 0.0,
"voluptates-expedita_lsat-lr_base": 0.0
},
"n-shot": {
"assumenda-debitis_logiqa_base": 0,
"assumenda-debitis_lsat-lr_base": 0,
"ipsum-aliquam_logiqa_base": 0,
"ipsum-aliquam_lsat-lr_base": 0,
"quia-provident_logiqa_base": 0,
"quia-provident_lsat-lr_base": 0,
"quos-recusandae_logiqa_base": 0,
"quos-recusandae_lsat-lr_base": 0,
"ratione-quaerat_logiqa_base": 0,
"ratione-quaerat_lsat-lr_base": 0,
"voluptates-expedita_logiqa_base": 0,
"voluptates-expedita_lsat-lr_base": 0
},
"config": {
"model": "vllm",
"model_args": "pretrained=mistralai/Mistral-7B-Instruct-v0.2,revision=main,dtype=auto,gpu_memory_utilization=0.9,trust_remote_code=true,max_length=4096",
"batch_size": "auto",
"batch_sizes": [],
"device": null,
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000,
"gen_kwargs": null
},
"git_hash": "92b0637"
}