{
  "results": {
    "arc_easy": {
      "acc,none": 0.3404882154882155,
      "acc_stderr,none": 0.009723676813825861,
      "acc_norm,none": 0.3345959595959596,
      "acc_norm_stderr,none": 0.00968213772432791,
      "alias": "arc_easy"
    },
    "lambada_openai": {
      "perplexity,none": 371.27644162995415,
      "perplexity_stderr,none": 20.709813793099112,
      "acc,none": 0.19794294585678246,
      "acc_stderr,none": 0.005551172446984931,
      "alias": "lambada_openai"
    },
    "piqa": {
      "acc,none": 0.5554951033732318,
      "acc_stderr,none": 0.011593746871584154,
      "acc_norm,none": 0.5549510337323177,
      "acc_norm_stderr,none": 0.011595157509775767,
      "alias": "piqa"
    },
    "sciq": {
      "acc,none": 0.574,
      "acc_stderr,none": 0.01564508768811381,
      "acc_norm,none": 0.565,
      "acc_norm_stderr,none": 0.0156850572527172,
      "alias": "sciq"
    },
    "wikitext": {
      "word_perplexity,none": 12021.905838477027,
      "word_perplexity_stderr,none": "N/A",
      "byte_perplexity,none": 5.793958843287674,
      "byte_perplexity_stderr,none": "N/A",
      "bits_per_byte,none": 2.5345494366283963,
      "bits_per_byte_stderr,none": "N/A",
      "alias": "wikitext"
    },
    "winogrande": {
      "acc,none": 0.5043409629044988,
      "acc_stderr,none": 0.0140519560640769,
      "alias": "winogrande"
    }
  },
  "configs": {
    "arc_easy": {
      "task": "arc_easy",
      "group": [
        "ai2_arc"
      ],
      "dataset_path": "ai2_arc",
      "dataset_name": "ARC-Easy",
      "training_split": "train",
      "validation_split": "validation",
      "test_split": "test",
      "doc_to_text": "Question: {{question}}\nAnswer:",
      "doc_to_target": "{{choices.label.index(answerKey)}}",
      "doc_to_choice": "{{choices.text}}",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "Question: {{question}}\nAnswer:",
      "metadata": {
        "version": 1.0
      }
    },
    "lambada_openai": {
      "task": "lambada_openai",
      "group": [
        "lambada"
      ],
      "dataset_path": "EleutherAI/lambada_openai",
      "dataset_name": "default",
      "test_split": "test",
      "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
      "doc_to_target": "{{' '+text.split(' ')[-1]}}",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "perplexity",
          "aggregation": "perplexity",
          "higher_is_better": false
        },
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "loglikelihood",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{text}}",
      "metadata": {
        "version": 1.0
      }
    },
    "piqa": {
      "task": "piqa",
      "dataset_path": "piqa",
      "training_split": "train",
      "validation_split": "validation",
      "doc_to_text": "Question: {{goal}}\nAnswer:",
      "doc_to_target": "label",
      "doc_to_choice": "{{[sol1, sol2]}}",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "goal",
      "metadata": {
        "version": 1.0
      }
    },
    "sciq": {
      "task": "sciq",
      "dataset_path": "sciq",
      "training_split": "train",
      "validation_split": "validation",
      "test_split": "test",
      "doc_to_text": "{{support.lstrip()}}\nQuestion: {{question}}\nAnswer:",
      "doc_to_target": 3,
      "doc_to_choice": "{{[distractor1, distractor2, distractor3, correct_answer]}}",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{support}} {{question}}",
      "metadata": {
        "version": 1.0
      }
    },
    "wikitext": {
      "task": "wikitext",
      "dataset_path": "EleutherAI/wikitext_document_level",
      "dataset_name": "wikitext-2-raw-v1",
      "training_split": "train",
      "validation_split": "validation",
      "test_split": "test",
      "doc_to_text": "",
      "doc_to_target": "<function wikitext_detokenizer at 0x7f1a1038b7f0>",
      "process_results": "<function process_results at 0x7f1a100cb910>",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "word_perplexity"
        },
        {
          "metric": "byte_perplexity"
        },
        {
          "metric": "bits_per_byte"
        }
      ],
      "output_type": "loglikelihood_rolling",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{page}}",
      "metadata": {
        "version": 2.0
      }
    },
    "winogrande": {
      "task": "winogrande",
      "dataset_path": "winogrande",
      "dataset_name": "winogrande_xl",
      "training_split": "train",
      "validation_split": "validation",
      "doc_to_text": "<function doc_to_text at 0x7f1a100e7370>",
      "doc_to_target": "<function doc_to_target at 0x7f19ea0b9480>",
      "doc_to_choice": "<function doc_to_choice at 0x7f19ea0b96c0>",
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 0,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "sentence",
      "metadata": {
        "version": 1.0
      }
    }
  },
  "versions": {
    "arc_easy": 1.0,
    "lambada_openai": 1.0,
    "piqa": 1.0,
    "sciq": 1.0,
    "wikitext": 2.0,
    "winogrande": 1.0
  },
  "n-shot": {
    "arc_easy": 0,
    "lambada_openai": 0,
    "piqa": 0,
    "sciq": 0,
    "wikitext": 0,
    "winogrande": 0
  },
  "config": {
    "model": "hf",
    "model_args": "pretrained=/netscratch/mostendorff/experiments/pythia-data-ablations/data/continued-pythia-410m/es_soft/hf_checkpoints/global_step9537,dtype=float16",
    "batch_size": "auto",
    "batch_sizes": [
      64
    ],
    "device": "cuda:0",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  },
  "git_hash": null
}