{
"results": {
"arc_challenge": {
"acc,none": 0.30802047781569963,
"acc_stderr,none": 0.013491429517292038,
"acc_norm,none": 0.3378839590443686,
"acc_norm_stderr,none": 0.013822047922283512
},
"arc_easy": {
"acc,none": 0.6788720538720538,
"acc_stderr,none": 0.009580787536986797,
"acc_norm,none": 0.6047979797979798,
"acc_norm_stderr,none": 0.010031894052790976
},
"boolq": {
"acc,none": 0.6657492354740061,
"acc_stderr,none": 0.00825057245508343
},
"hellaswag": {
"acc,none": 0.48466440948018324,
"acc_stderr,none": 0.004987433862274563,
"acc_norm,none": 0.6404102768372834,
"acc_norm_stderr,none": 0.004788994060654275
},
"lambada_openai": {
"perplexity,none": 3.9048816842205616,
"perplexity_stderr,none": 0.0904411518306548,
"acc,none": 0.6920240636522415,
"acc_stderr,none": 0.006431778256505186
},
"openbookqa": {
"acc,none": 0.264,
"acc_stderr,none": 0.019732885585922094,
"acc_norm,none": 0.374,
"acc_norm_stderr,none": 0.021660710347204484
},
"piqa": {
"acc,none": 0.7600652883569097,
"acc_stderr,none": 0.009963625892809545,
"acc_norm,none": 0.766050054406964,
"acc_norm_stderr,none": 0.009877236895137463
},
"sciq": {
"acc,none": 0.901,
"acc_stderr,none": 0.009449248027662744,
"acc_norm,none": 0.829,
"acc_norm_stderr,none": 0.011912216456264583
},
"wikitext": {
"word_perplexity,none": 18.072185827315998,
"byte_perplexity,none": 1.6114783437385283,
"bits_per_byte,none": 0.6883848005035924
},
"winogrande": {
"acc,none": 0.6164167324388319,
"acc_stderr,none": 0.01366627588953902
}
},
"configs": {
"arc_challenge": {
"task": "arc_challenge",
"group": [
"ai2_arc",
"multiple_choice"
],
"dataset_path": "ai2_arc",
"dataset_name": "ARC-Challenge",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "Question: {{question}}\nAnswer:",
"doc_to_target": "{{choices.label.index(answerKey)}}",
"doc_to_choice": "{{choices.text}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "Question: {{question}}\nAnswer:"
},
"arc_easy": {
"task": "arc_easy",
"group": [
"ai2_arc",
"multiple_choice"
],
"dataset_path": "ai2_arc",
"dataset_name": "ARC-Easy",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "Question: {{question}}\nAnswer:",
"doc_to_target": "{{choices.label.index(answerKey)}}",
"doc_to_choice": "{{choices.text}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "Question: {{question}}\nAnswer:"
},
"boolq": {
"task": "boolq",
"group": [
"super-glue-lm-eval-v1"
],
"dataset_path": "super_glue",
"dataset_name": "boolq",
"training_split": "train",
"validation_split": "validation",
"doc_to_text": "{{passage}}\nQuestion: {{question}}?\nAnswer:",
"doc_to_target": "label",
"doc_to_choice": [
"no",
"yes"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc"
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "passage"
},
"hellaswag": {
"task": "hellaswag",
"group": [
"multiple_choice"
],
"dataset_path": "hellaswag",
"training_split": "train",
"validation_split": "validation",
"doc_to_text": "{% set text = activity_label ~ ': ' ~ ctx_a ~ ' ' ~ ctx_b.capitalize() %}{{text|trim|replace(' [title]', '. ')|regex_replace('\\[.*?\\]', '')|replace(' ', ' ')}}",
"doc_to_target": "{{label}}",
"doc_to_choice": "{{endings|map('trim')|map('replace', ' [title]', '. ')|map('regex_replace', '\\[.*?\\]', '')|map('replace', ' ', ' ')|list}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false
},
"lambada_openai": {
"task": "lambada_openai",
"group": [
"lambada",
"loglikelihood",
"perplexity"
],
"dataset_path": "EleutherAI/lambada_openai",
"dataset_name": "default",
"test_split": "test",
"doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
"doc_to_target": "{{' '+text.split(' ')[-1]}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "perplexity",
"aggregation": "perplexity",
"higher_is_better": false
},
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "loglikelihood",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{text}}"
},
"openbookqa": {
"task": "openbookqa",
"group": [
"multiple_choice"
],
"dataset_path": "openbookqa",
"dataset_name": "main",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "question_stem",
"doc_to_target": "{{choices.label.index(answerKey.lstrip())}}",
"doc_to_choice": "{{choices.text}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "question_stem"
},
"piqa": {
"task": "piqa",
"group": [
"multiple_choice"
],
"dataset_path": "piqa",
"training_split": "train",
"validation_split": "validation",
"doc_to_text": "Question: {{goal}}\nAnswer:",
"doc_to_target": "label",
"doc_to_choice": "{{[sol1, sol2]}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "goal"
},
"sciq": {
"task": "sciq",
"group": [
"multiple_choice"
],
"dataset_path": "sciq",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "{{support.lstrip()}}\nQuestion: {{question}}\nAnswer:",
"doc_to_target": 3,
"doc_to_choice": "{{[distractor1, distractor2, distractor3, correct_answer]}}",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{support}} {{question}}"
},
"wikitext": {
"task": "wikitext",
"group": [
"perplexity",
"loglikelihood_rolling"
],
"dataset_path": "EleutherAI/wikitext_document_level",
"dataset_name": "wikitext-2-raw-v1",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "",
"doc_to_target": "<function wikitext_detokenizer at 0x7f5ef3dc5120>",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "word_perplexity"
},
{
"metric": "byte_perplexity"
},
{
"metric": "bits_per_byte"
}
],
"output_type": "loglikelihood_rolling",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{page}}"
},
"winogrande": {
"task": "winogrande",
"dataset_path": "winogrande",
"dataset_name": "winogrande_xl",
"training_split": "train",
"validation_split": "validation",
"doc_to_text": "<function doc_to_text at 0x7f5ef3dc5360>",
"doc_to_target": "<function doc_to_target at 0x7f5ef3dc56c0>",
"doc_to_choice": "<function doc_to_choice at 0x7f5ef3dc5a20>",
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "sentence"
}
},
"versions": {
"arc_challenge": "Yaml",
"arc_easy": "Yaml",
"boolq": "Yaml",
"hellaswag": "Yaml",
"lambada_openai": "Yaml",
"openbookqa": "Yaml",
"piqa": "Yaml",
"sciq": "Yaml",
"wikitext": "Yaml",
"winogrande": "Yaml"
},
"config": {
"model": "hf",
"model_args": "pretrained=lomahony/eleuther-pythia6.9b-hh-sft",
"batch_size": "4",
"batch_sizes": [],
"device": null,
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000
},
"git_hash": "d1a44c8"
}