{
  "results": {
    "bigbench_cause_and_effect_two_sentences": {
      "multiple_choice_grade": 0.43137254901960786,
      "multiple_choice_grade_stderr": 0.07004145529212454
    }
  },
  "versions": {
    "bigbench_cause_and_effect_two_sentences": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=bloom",
    "num_fewshot": 1,
    "batch_size": null,
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}