{"amazon_reviews_multi_en": {"prompt_body_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_body_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.5508}, "template_name": "prompt_body_title_to_star"}, "prompt_review_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_review_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.51}, "template_name": "prompt_review_to_star"}, "prompt_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.3758}, "template_name": "prompt_title_to_star"}}, "amazon_reviews_multi_es": {"prompt_body_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_body_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.4776}, "template_name": 
"prompt_body_title_to_star"}, "prompt_review_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_review_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.4444}, "template_name": "prompt_review_to_star"}, "prompt_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.3088}, "template_name": "prompt_title_to_star"}}, "amazon_reviews_multi_fr": {"prompt_body_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='fr', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_body_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "fr", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.4742}, "template_name": "prompt_body_title_to_star"}, "prompt_review_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='fr', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_review_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "fr", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.4492}, "template_name": "prompt_review_to_star"}, 
"prompt_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='fr', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "fr", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.3192}, "template_name": "prompt_title_to_star"}}, "amazon_reviews_multi_zh": {"prompt_body_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='zh', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_body_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "zh", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.4712}, "template_name": "prompt_body_title_to_star"}, "prompt_review_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='zh', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_review_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "zh", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.4478}, "template_name": "prompt_review_to_star"}, "prompt_title_to_star": {"arguments": "Namespace(config_name=None, dataset_config_name='zh', dataset_name='amazon_reviews_multi', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name='en', template_name='prompt_title_to_star', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "zh", "dataset_name": "amazon_reviews_multi", "evaluation": {"accuracy": 0.3208}, "template_name": "prompt_title_to_star"}}, "aqua_rat_raw": {"Answer questions 
from options": {"arguments": "Namespace(config_name=None, dataset_config_name='raw', dataset_name='aqua_rat', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='Answer questions from options', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "raw", "dataset_name": "aqua_rat", "evaluation": {"accuracy": 0.2440944881889764}, "template_name": "Answer questions from options"}, "answer_quiz": {"arguments": "Namespace(config_name=None, dataset_config_name='raw', dataset_name='aqua_rat', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='answer_quiz', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "raw", "dataset_name": "aqua_rat", "evaluation": {"accuracy": 0.23228346456692914}, "template_name": "answer_quiz"}, "select_the_best_option": {"arguments": "Namespace(config_name=None, dataset_config_name='raw', dataset_name='aqua_rat', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='select_the_best_option', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "raw", "dataset_name": "aqua_rat", "evaluation": {"accuracy": 0.25196850393700787}, "template_name": "select_the_best_option"}}, "art_None": {"choose_hypothesis": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='art', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='choose_hypothesis', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "art", "evaluation": {"accuracy": 0.6109660574412533}, "template_name": "choose_hypothesis"}, "choose_hypothesis_believable": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='art', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='choose_hypothesis_believable', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "art", "evaluation": {"accuracy": 0.5926892950391645}, "template_name": "choose_hypothesis_believable"}, "choose_hypothesis_desc": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='art', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='choose_hypothesis_desc', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "art", "evaluation": {"accuracy": 0.554177545691906}, "template_name": "choose_hypothesis_desc"}, "choose_hypothesis_likely": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='art', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='choose_hypothesis_likely', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "art", "evaluation": {"accuracy": 0.5587467362924282}, "template_name": "choose_hypothesis_likely"}, "choose_hypothesis_options": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='art', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='choose_hypothesis_options', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "art", "evaluation": {"accuracy": 0.5842036553524804}, "template_name": "choose_hypothesis_options"}}, "banking77_None": {"direct_to_which_department": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='banking77', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=4, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='direct_to_which_department', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "banking77", "evaluation": {"accuracy": 0.19675324675324676}, "template_name": "direct_to_which_department"}, "help_page_topic": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='banking77', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=4, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='help_page_topic', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "banking77", "evaluation": {"accuracy": 0.2538961038961039}, "template_name": "help_page_topic"}, "rephrase_as_banking_term": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='banking77', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=4, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='rephrase_as_banking_term', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "banking77", "evaluation": {"accuracy": 0.2636363636363636}, "template_name": "rephrase_as_banking_term"}}, "blbooksgenre_title_genre_classifiction": {"classify": {"arguments": "Namespace(config_name=None, dataset_config_name='title_genre_classifiction', dataset_name='blbooksgenre', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='classify', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "title_genre_classifiction", "dataset_name": "blbooksgenre", "evaluation": {"accuracy": 0.27764976958525345}, "template_name": "classify"}, "multi-choice": {"arguments": "Namespace(config_name=None, dataset_config_name='title_genre_classifiction', dataset_name='blbooksgenre', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='multi-choice', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "title_genre_classifiction", "dataset_name": "blbooksgenre", "evaluation": {"accuracy": 0.8456221198156681}, "template_name": "multi-choice"}, "premise_context_first": {"arguments": "Namespace(config_name=None, dataset_config_name='title_genre_classifiction', dataset_name='blbooksgenre', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='premise_context_first', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "title_genre_classifiction", "dataset_name": "blbooksgenre", "evaluation": {"accuracy": 0.7494239631336406}, "template_name": "premise_context_first"}}, "blimp_adjunct_island": {"grammatical_between_1_2": {"arguments": "Namespace(config_name=None, dataset_config_name='adjunct_island', dataset_name='blimp', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='grammatical_between_1_2', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "adjunct_island", "dataset_name": "blimp", "evaluation": {"accuracy": 0.516}, "template_name": "grammatical_between_1_2"}, "grammatical_between_A_B": {"arguments": "Namespace(config_name=None, dataset_config_name='adjunct_island', dataset_name='blimp', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='grammatical_between_A_B', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "adjunct_island", "dataset_name": "blimp", "evaluation": {"accuracy": 0.428}, "template_name": "grammatical_between_A_B"}, "grammatical_which_one_1_2": {"arguments": "Namespace(config_name=None, dataset_config_name='adjunct_island', dataset_name='blimp', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='grammatical_which_one_1_2', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "adjunct_island", "dataset_name": "blimp", "evaluation": {"accuracy": 0.576}, "template_name": "grammatical_which_one_1_2"}, "single_sentence_bad_yes_no": {"arguments": "Namespace(config_name=None, dataset_config_name='adjunct_island', dataset_name='blimp', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='single_sentence_bad_yes_no', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "adjunct_island", "dataset_name": "blimp", "evaluation": {"accuracy": 0.478}, "template_name": "single_sentence_bad_yes_no"}, "single_sentence_good_yes_no": {"arguments": "Namespace(config_name=None, dataset_config_name='adjunct_island', dataset_name='blimp', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='single_sentence_good_yes_no', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "adjunct_island", "dataset_name": "blimp", "evaluation": {"accuracy": 0.499}, "template_name": "single_sentence_good_yes_no"}}, "climate_fever_None": {"claim_and_all_supporting_evidences": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='climate_fever', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='claim_and_all_supporting_evidences', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "climate_fever", "evaluation": {"accuracy": 0.4273615635179153}, "template_name": "claim_and_all_supporting_evidences"}, "fifth_evidence_and_claim_itemization": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='climate_fever', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='fifth_evidence_and_claim_itemization', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "climate_fever", "evaluation": {"accuracy": 0.43973941368078173}, "template_name": "fifth_evidence_and_claim_itemization"}, "first_evidence_and_claim_itemization": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='climate_fever', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='first_evidence_and_claim_itemization', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "climate_fever", "evaluation": {"accuracy": 0.3283387622149837}, "template_name": "first_evidence_and_claim_itemization"}, "second_evidence_and_claim_itemization": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='climate_fever', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='second_evidence_and_claim_itemization', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "climate_fever", "evaluation": {"accuracy": 0.47687296416938113}, "template_name": "second_evidence_and_claim_itemization"}, "third_evidence_claim_pair": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='climate_fever', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='third_evidence_claim_pair', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "climate_fever", "evaluation": {"accuracy": 0.4586319218241042}, "template_name": "third_evidence_claim_pair"}}, "codah_codah": {"affirmative_instruction_after_sentence_and_choices": {"arguments": "Namespace(config_name=None, dataset_config_name='codah', dataset_name='codah', debug=False, dtype='float16', 
max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='affirmative_instruction_after_sentence_and_choices', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "codah", "dataset_name": "codah", "evaluation": {"accuracy": 0.7327089337175793}, "template_name": "affirmative_instruction_after_sentence_and_choices"}, "affirmative_instruction_before_sentence_and_choices": {"arguments": "Namespace(config_name=None, dataset_config_name='codah', dataset_name='codah', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='affirmative_instruction_before_sentence_and_choices', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "codah", "dataset_name": "codah", "evaluation": {"accuracy": 0.7359510086455331}, "template_name": "affirmative_instruction_before_sentence_and_choices"}, "interrogative_instruction_after_sentence_and_choices": {"arguments": "Namespace(config_name=None, dataset_config_name='codah', dataset_name='codah', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='interrogative_instruction_after_sentence_and_choices', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "codah", "dataset_name": "codah", "evaluation": {"accuracy": 0.736671469740634}, "template_name": "interrogative_instruction_after_sentence_and_choices"}}, "commonsense_qa_None": {"answer_given_question_without_options": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='commonsense_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='answer_given_question_without_options', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "commonsense_qa", "evaluation": {"accuracy": 0.6224406224406225}, "template_name": "answer_given_question_without_options"}, "most_suitable_answer": 
{"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='commonsense_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='most_suitable_answer', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "commonsense_qa", "evaluation": {"accuracy": 0.8435708435708436}, "template_name": "most_suitable_answer"}, "question_answering": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='commonsense_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name=None, template_name='question_answering', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "commonsense_qa", "evaluation": {"accuracy": 0.8304668304668305}, "template_name": "question_answering"}}, "conv_ai_3_None": {"ambiguous": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='conv_ai_3', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='ambiguous', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "conv_ai_3", "evaluation": {"accuracy": 0.39040207522697795}, "template_name": "ambiguous"}, "clarification_needed": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='conv_ai_3', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='clarification_needed', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "conv_ai_3", "evaluation": {"accuracy": 0.39040207522697795}, "template_name": "clarification_needed"}, "directly_answer": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='conv_ai_3', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='directly_answer', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "conv_ai_3", "evaluation": {"accuracy": 0.6095979247730221}, "template_name": "directly_answer"}, "score_give_number": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='conv_ai_3', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='score_give_number', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "conv_ai_3", "evaluation": {"accuracy": 0.37959360138348464}, "template_name": "score_give_number"}, "score_how_much": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='conv_ai_3', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='score_how_much', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "conv_ai_3", "evaluation": {"accuracy": 0.03285776048421963}, "template_name": "score_how_much"}}, "craigslist_bargains_None": {"best deal": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='craigslist_bargains', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='best deal', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "craigslist_bargains", "evaluation": {"accuracy": 0.49246231155778897}, "template_name": "best deal"}, "good deal for seller": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='craigslist_bargains', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='good deal for seller', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "craigslist_bargains", "evaluation": {"accuracy": 0.4371859296482412}, "template_name": "good deal for seller"}, "good deal for seller no list price": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='craigslist_bargains', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='good deal for seller no list price', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "craigslist_bargains", "evaluation": {"accuracy": 0.6046901172529313}, "template_name": "good deal for seller no list price"}, "good deal for seller no list price implicit": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='craigslist_bargains', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='good deal for seller no list price implicit', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "craigslist_bargains", "evaluation": {"accuracy": 0.25963149078726966}, "template_name": "good deal for seller no list price implicit"}}, "emotion_None": {"answer_question_with_emotion_label": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='emotion', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='answer_question_with_emotion_label', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "emotion", "evaluation": {"accuracy": 0.344}, "template_name": "answer_question_with_emotion_label"}, "answer_with_class_label": {"arguments": "Namespace(config_name=None, 
dataset_config_name=None, dataset_name='emotion', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='answer_with_class_label', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "emotion", "evaluation": {"accuracy": 0.2295}, "template_name": "answer_with_class_label"}, "choose_the_best_emotion_label": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='emotion', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='choose_the_best_emotion_label', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "emotion", "evaluation": {"accuracy": 0.317}, "template_name": "choose_the_best_emotion_label"}, "reply_with_emoation_label": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='emotion', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='reply_with_emoation_label', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "emotion", "evaluation": {"accuracy": 0.5025}, "template_name": "reply_with_emoation_label"}}, "financial_phrasebank_sentences_allagree": {"bullish_neutral_bearish": {"arguments": "Namespace(config_name=None, dataset_config_name='sentences_allagree', dataset_name='financial_phrasebank', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='bullish_neutral_bearish', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sentences_allagree", "dataset_name": "financial_phrasebank", "evaluation": {"accuracy": 0.3710247349823322}, "template_name": "bullish_neutral_bearish"}, "complementary_industries": {"arguments": "Namespace(config_name=None, 
dataset_config_name='sentences_allagree', dataset_name='financial_phrasebank', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='complementary_industries', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sentences_allagree", "dataset_name": "financial_phrasebank", "evaluation": {"accuracy": 0.04637809187279152}, "template_name": "complementary_industries"}, "sentiment": {"arguments": "Namespace(config_name=None, dataset_config_name='sentences_allagree', dataset_name='financial_phrasebank', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='sentiment', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sentences_allagree", "dataset_name": "financial_phrasebank", "evaluation": {"accuracy": 0.3489399293286219}, "template_name": "sentiment"}, "share_price_option": {"arguments": "Namespace(config_name=None, dataset_config_name='sentences_allagree', dataset_name='financial_phrasebank', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='share_price_option', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sentences_allagree", "dataset_name": "financial_phrasebank", "evaluation": {"accuracy": 0.37146643109540634}, "template_name": "share_price_option"}, "word_comes_to_mind": {"arguments": "Namespace(config_name=None, dataset_config_name='sentences_allagree', dataset_name='financial_phrasebank', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='word_comes_to_mind', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sentences_allagree", "dataset_name": "financial_phrasebank", "evaluation": {"accuracy": 0.01987632508833922}, "template_name": "word_comes_to_mind"}}, "glue_cola": {"Following 
sentence acceptable": {"arguments": "Namespace(config_name=None, dataset_config_name='cola', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Following sentence acceptable', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "cola", "dataset_name": "glue", "evaluation": {"accuracy": 0.5685522531160115}, "template_name": "Following sentence acceptable"}, "Make sense yes no": {"arguments": "Namespace(config_name=None, dataset_config_name='cola', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Make sense yes no', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "cola", "dataset_name": "glue", "evaluation": {"accuracy": 0.3326941514860978}, "template_name": "Make sense yes no"}, "Previous sentence acceptable": {"arguments": "Namespace(config_name=None, dataset_config_name='cola', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Previous sentence acceptable', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "cola", "dataset_name": "glue", "evaluation": {"accuracy": 0.3096836049856184}, "template_name": "Previous sentence acceptable"}, "editing": {"arguments": "Namespace(config_name=None, dataset_config_name='cola', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='editing', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "cola", "dataset_name": "glue", "evaluation": {"accuracy": 0.3144774688398849}, "template_name": "editing"}, "is_this_correct": {"arguments": "Namespace(config_name=None, dataset_config_name='cola', dataset_name='glue', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is_this_correct', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "cola", "dataset_name": "glue", "evaluation": {"accuracy": 0.4592521572387344}, "template_name": "is_this_correct"}}, "glue_sst2": {"following positive negative": {"arguments": "Namespace(config_name=None, dataset_config_name='sst2', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='following positive negative', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sst2", "dataset_name": "glue", "evaluation": {"accuracy": 0.9415137614678899}, "template_name": "following positive negative"}, "happy or mad": {"arguments": "Namespace(config_name=None, dataset_config_name='sst2', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='happy or mad', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sst2", "dataset_name": "glue", "evaluation": {"accuracy": 0.9013761467889908}, "template_name": "happy or mad"}, "positive negative after": {"arguments": "Namespace(config_name=None, dataset_config_name='sst2', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='positive negative after', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sst2", "dataset_name": "glue", "evaluation": {"accuracy": 0.9461009174311926}, "template_name": "positive negative after"}, "review": {"arguments": "Namespace(config_name=None, dataset_config_name='sst2', dataset_name='glue', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='review', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sst2", "dataset_name": "glue", "evaluation": {"accuracy": 0.9403669724770642}, "template_name": "review"}, "said": {"arguments": "Namespace(config_name=None, dataset_config_name='sst2', dataset_name='glue', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='said', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sst2", "dataset_name": "glue", "evaluation": {"accuracy": 0.9185779816513762}, "template_name": "said"}}, "head_qa_en": {"multiple_choice_a_and_q_en": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_a_and_q_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.27379209370424595}, "template_name": "multiple_choice_a_and_q_en"}, "multiple_choice_a_and_q_with_context_en": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_a_and_q_with_context_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.2730600292825769}, "template_name": "multiple_choice_a_and_q_with_context_en"}, "multiple_choice_q_and_a_en": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', 
output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_q_and_a_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.40922401171303074}, "template_name": "multiple_choice_q_and_a_en"}, "multiple_choice_q_and_a_index_en": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_q_and_a_index_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.3916544655929722}, "template_name": "multiple_choice_q_and_a_index_en"}, "multiple_choice_q_and_a_index_with_context_en": {"arguments": "Namespace(config_name=None, dataset_config_name='en', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_q_and_a_index_with_context_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "en", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.3857979502196193}, "template_name": "multiple_choice_q_and_a_index_with_context_en"}}, "head_qa_es": {"multiple_choice_a_and_q_en": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_a_and_q_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.2679355783308931}, "template_name": "multiple_choice_a_and_q_en"}, "multiple_choice_a_and_q_with_context_en": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', 
output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_a_and_q_with_context_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.2642752562225476}, "template_name": "multiple_choice_a_and_q_with_context_en"}, "multiple_choice_q_and_a_en": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_q_and_a_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.39751098096632503}, "template_name": "multiple_choice_q_and_a_en"}, "multiple_choice_q_and_a_index_en": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_q_and_a_index_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.3506588579795022}, "template_name": "multiple_choice_q_and_a_index_en"}, "multiple_choice_q_and_a_index_with_context_en": {"arguments": "Namespace(config_name=None, dataset_config_name='es', dataset_name='head_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, target_max_length=256, template_config_name='en', template_name='multiple_choice_q_and_a_index_with_context_en', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "es", "dataset_name": "head_qa", "evaluation": {"accuracy": 0.3440702781844802}, "template_name": "multiple_choice_q_and_a_index_with_context_en"}}, "health_fact_None": {"claim_explanation_classification": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='health_fact', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', 
output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='claim_explanation_classification', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "health_fact", "evaluation": {"accuracy": 0.5755102040816327}, "template_name": "claim_explanation_classification"}, "claim_veracity_classification_after_reading_I_believe": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='health_fact', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='claim_veracity_classification_after_reading_I_believe', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "health_fact", "evaluation": {"accuracy": 0.31510204081632653}, "template_name": "claim_veracity_classification_after_reading_I_believe"}, "claim_veracity_classification_tell_me": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='health_fact', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='claim_veracity_classification_tell_me', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "health_fact", "evaluation": {"accuracy": 0.053877551020408164}, "template_name": "claim_veracity_classification_tell_me"}}, "hlgd_None": {"is_same_event_editor_asks": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='hlgd', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is_same_event_editor_asks', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "hlgd", "evaluation": {"accuracy": 0.6230062832286128}, "template_name": "is_same_event_editor_asks"}, "is_same_event_interrogative_talk": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='hlgd', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is_same_event_interrogative_talk', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "hlgd", "evaluation": {"accuracy": 0.7056549057515709}, "template_name": "is_same_event_interrogative_talk"}, "is_same_event_refer": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='hlgd', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is_same_event_refer', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "hlgd", "evaluation": {"accuracy": 0.6457225712904785}, "template_name": "is_same_event_refer"}, "is_same_event_with_time_interrogative_related": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='hlgd', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is_same_event_with_time_interrogative_related', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "hlgd", "evaluation": {"accuracy": 0.7873368777187046}, "template_name": "is_same_event_with_time_interrogative_related"}, "is_same_event_with_time_interrogative_talk": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='hlgd', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is_same_event_with_time_interrogative_talk', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "hlgd", "evaluation": {"accuracy": 0.8182696955050749}, "template_name": "is_same_event_with_time_interrogative_talk"}}, "hyperpartisan_news_detection_byarticle": {"consider_does_it_follow_a_hyperpartisan_argumentation": {"arguments": "Namespace(config_name=None, 
dataset_config_name='byarticle', dataset_name='hyperpartisan_news_detection', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='consider_does_it_follow_a_hyperpartisan_argumentation', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "byarticle", "dataset_name": "hyperpartisan_news_detection", "evaluation": {"accuracy": 0.6310077519379845}, "template_name": "consider_does_it_follow_a_hyperpartisan_argumentation"}, "consider_it_exhibits_extreme_one_sidedness": {"arguments": "Namespace(config_name=None, dataset_config_name='byarticle', dataset_name='hyperpartisan_news_detection', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='consider_it_exhibits_extreme_one_sidedness', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "byarticle", "dataset_name": "hyperpartisan_news_detection", "evaluation": {"accuracy": 0.6310077519379845}, "template_name": "consider_it_exhibits_extreme_one_sidedness"}, "consume_with_caution": {"arguments": "Namespace(config_name=None, dataset_config_name='byarticle', dataset_name='hyperpartisan_news_detection', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='consume_with_caution', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "byarticle", "dataset_name": "hyperpartisan_news_detection", "evaluation": {"accuracy": 0.6310077519379845}, "template_name": "consume_with_caution"}, "extreme_left_wing_or_right_wing": {"arguments": "Namespace(config_name=None, dataset_config_name='byarticle', dataset_name='hyperpartisan_news_detection', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='extreme_left_wing_or_right_wing', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": 
"byarticle", "dataset_name": "hyperpartisan_news_detection", "evaluation": {"accuracy": 0.6310077519379845}, "template_name": "extreme_left_wing_or_right_wing"}, "follows_hyperpartisan_argumentation": {"arguments": "Namespace(config_name=None, dataset_config_name='byarticle', dataset_name='hyperpartisan_news_detection', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='follows_hyperpartisan_argumentation', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "byarticle", "dataset_name": "hyperpartisan_news_detection", "evaluation": {"accuracy": 0.6310077519379845}, "template_name": "follows_hyperpartisan_argumentation"}}, "liar_None": {"Given statement guess category": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='liar', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Given statement guess category', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "liar", "evaluation": {"accuracy": 0.20794392523364486}, "template_name": "Given statement guess category"}}, "lince_sa_spaeng": {"express sentiment": {"arguments": "Namespace(config_name=None, dataset_config_name='sa_spaeng', dataset_name='lince', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='express sentiment', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sa_spaeng", "dataset_name": "lince", "evaluation": {"accuracy": 0.5814954276492738}, "template_name": "express sentiment"}, "negation template": {"arguments": "Namespace(config_name=None, dataset_config_name='sa_spaeng', dataset_name='lince', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='negation template', 
tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sa_spaeng", "dataset_name": "lince", "evaluation": {"accuracy": 0.16621839698762775}, "template_name": "negation template"}, "original poster expressed sentiment": {"arguments": "Namespace(config_name=None, dataset_config_name='sa_spaeng', dataset_name='lince', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='original poster expressed sentiment', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sa_spaeng", "dataset_name": "lince", "evaluation": {"accuracy": 0.5831091984938139}, "template_name": "original poster expressed sentiment"}, "sentiment trying to express": {"arguments": "Namespace(config_name=None, dataset_config_name='sa_spaeng', dataset_name='lince', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='sentiment trying to express', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sa_spaeng", "dataset_name": "lince", "evaluation": {"accuracy": 0.584722969338354}, "template_name": "sentiment trying to express"}, "the author seem": {"arguments": "Namespace(config_name=None, dataset_config_name='sa_spaeng', dataset_name='lince', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='the author seem', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "sa_spaeng", "dataset_name": "lince", "evaluation": {"accuracy": 0.5691231845077999}, "template_name": "the author seem"}}, "math_qa_None": {"choose_correct_og": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='math_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='choose_correct_og', tokenizer_name=None, use_slow_tokenizer=False)", 
"dataset_config_name": null, "dataset_name": "math_qa", "evaluation": {"accuracy": 0.2100502512562814}, "template_name": "choose_correct_og"}, "first_choice_then_problem": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='math_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='first_choice_then_problem', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "math_qa", "evaluation": {"accuracy": 0.21708542713567838}, "template_name": "first_choice_then_problem"}, "gre_problem": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='math_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='gre_problem', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "math_qa", "evaluation": {"accuracy": 0.20871021775544388}, "template_name": "gre_problem"}, "pick_the_correct": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='math_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='pick_the_correct', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "math_qa", "evaluation": {"accuracy": 0.21139028475711893}, "template_name": "pick_the_correct"}, "problem_set_type": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='math_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='test', target_max_length=256, template_config_name=None, template_name='problem_set_type', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "math_qa", "evaluation": {"accuracy": 0.38123953098827473}, "template_name": "problem_set_type"}}, "mlsum_es": {"layman_summ_es": {"bleu": 
3.612948631805906, "bleu_stderr": 0.09706767527109322, "rouge1_fmeasure": 0.2132867654263301, "rouge1_fmeasure_stderr": 0.0023414288877090226, "rouge1_precision": 0.25720877609362947, "rouge1_precision_stderr": 0.002866204775543915, "rouge1_recall": 0.21570826045711883, "rouge1_recall_stderr": 0.0028219768467931316, "rouge2_fmeasure": 0.056007499633099506, "rouge2_fmeasure_stderr": 0.001671996707075678, "rouge2_precision": 0.0656763315976497, "rouge2_precision_stderr": 0.002091174894878516, "rouge2_recall": 0.058883909534972835, "rouge2_recall_stderr": 0.0018684387267495434, "rougeL_fmeasure": 0.16637065130970433, "rougeL_fmeasure_stderr": 0.0019401940534334063, "rougeL_precision": 0.20296195866113786, "rougeL_precision_stderr": 0.002463373191290898, "rougeL_recall": 0.16800778229835897, "rougeL_recall_stderr": 0.0023113725152130527, "rougeLsum_fmeasure": 0.1703003950966778, "rougeLsum_fmeasure_stderr": 0.0019655927463575074, "rougeLsum_precision": 0.20768190658213875, "rougeLsum_precision_stderr": 0.0024996543423916463, "rougeLsum_recall": 0.17213528120005406, "rougeLsum_recall_stderr": 0.0023628667195956534}, "palm_prompt": {"bleu": 4.815053212697339, "bleu_stderr": 0.22929921240192092, "rouge1_fmeasure": 0.24155464763055362, "rouge1_fmeasure_stderr": 0.0024021553413102275, "rouge1_precision": 0.2664214534878677, "rouge1_precision_stderr": 0.002736901307721048, "rouge1_recall": 0.259042211572509, "rouge1_recall_stderr": 0.003023227343717013, "rouge2_fmeasure": 0.07072194087888328, "rouge2_fmeasure_stderr": 0.0019187225902360238, "rouge2_precision": 0.07596613017420617, "rouge2_precision_stderr": 0.002093675494291177, "rouge2_recall": 0.07803116812507413, "rouge2_recall_stderr": 0.002214980401457295, "rougeL_fmeasure": 0.18681843812599136, "rougeL_fmeasure_stderr": 0.0020613027767420913, "rougeL_precision": 0.20732883923716353, "rougeL_precision_stderr": 0.002355023098360087, "rougeL_recall": 0.20013530891135448, "rougeL_recall_stderr": 0.002545877916378003, "rougeLsum_fmeasure": 0.19073355304638137, "rougeLsum_fmeasure_stderr": 0.0020838845388973935, "rougeLsum_precision": 0.21204351337105004, "rougeLsum_precision_stderr": 0.0024061908027064216, "rougeLsum_recall": 0.2042193199739302, "rougeLsum_recall_stderr": 0.0025788791923903134}, "summarise_this_in_es_few_sentences": {"bleu": 3.3853246386342164, "bleu_stderr": 0.14255747881032574, "rouge1_fmeasure": 0.22137151454609671, "rouge1_fmeasure_stderr": 0.0022631062388600797, "rouge1_precision": 0.2397007754084853, "rouge1_precision_stderr": 0.0026353479818885177, "rouge1_recall": 0.2572731691168125, "rouge1_recall_stderr": 0.003144380928466416, "rouge2_fmeasure": 0.058841155503591265, "rouge2_fmeasure_stderr": 0.0016575332078524489, "rouge2_precision": 0.062156085362161634, "rouge2_precision_stderr": 0.0018790101902136723, "rouge2_recall": 0.07205204887012716, "rouge2_recall_stderr": 0.002177971370605756, "rougeL_fmeasure": 0.1687112149663523, "rougeL_fmeasure_stderr": 0.0018447826454456768, "rougeL_precision": 0.1848267455777402, "rougeL_precision_stderr": 0.002224591232921315, "rougeL_recall": 0.1956072474579404, "rougeL_recall_stderr": 0.0025524780621392633, "rougeLsum_fmeasure": 0.17448210890640256, "rougeLsum_fmeasure_stderr": 0.0018864032221029344, "rougeLsum_precision": 0.19064968377556207, "rougeLsum_precision_stderr": 0.0022683008399854887, "rougeLsum_recall": 0.2037386599504157, "rougeLsum_recall_stderr": 0.0026825257897514446}}, "movie_rationales_None": {"Evidences + review": {"arguments": "Namespace(config_name=None, 
dataset_config_name=None, dataset_name='movie_rationales', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Evidences + review', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "movie_rationales", "evaluation": {"accuracy": 0.975}, "template_name": "Evidences + review"}, "Evidences sentiment classification": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='movie_rationales', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Evidences sentiment classification', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "movie_rationales", "evaluation": {"accuracy": 0.99}, "template_name": "Evidences sentiment classification"}, "Standard binary sentiment analysis": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='movie_rationales', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Standard binary sentiment analysis', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "movie_rationales", "evaluation": {"accuracy": 0.95}, "template_name": "Standard binary sentiment analysis"}}, "mwsc_None": {"in-the-sentence": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='mwsc', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='in-the-sentence', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "mwsc", "evaluation": {"accuracy": 0.5853658536585366}, "template_name": "in-the-sentence"}, "in-the-sentence-question-first": {"arguments": "Namespace(config_name=None, dataset_config_name=None, 
dataset_name='mwsc', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='in-the-sentence-question-first', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "mwsc", "evaluation": {"accuracy": 0.6219512195121951}, "template_name": "in-the-sentence-question-first"}, "is-correct": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='mwsc', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is-correct', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "mwsc", "evaluation": {"accuracy": 0.5487804878048781}, "template_name": "is-correct"}, "options-or": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='mwsc', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='options-or', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "mwsc", "evaluation": {"accuracy": 0.6951219512195121}, "template_name": "options-or"}, "what-think": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='mwsc', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='what-think', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "mwsc", "evaluation": {"accuracy": 0.6951219512195121}, "template_name": "what-think"}}, "onestop_english_None": {"ara_context": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='onestop_english', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='ara_context', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "onestop_english", "evaluation": {"accuracy": 0.2945326278659612}, "template_name": "ara_context"}, "assess": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='onestop_english', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='assess', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "onestop_english", "evaluation": {"accuracy": 0.54673721340388}, "template_name": "assess"}, "determine_reading_level_from_the_first_three_sentences": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='onestop_english', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='determine_reading_level_from_the_first_three_sentences', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "onestop_english", "evaluation": {"accuracy": 0.37918871252204583}, "template_name": "determine_reading_level_from_the_first_three_sentences"}, "esl_context": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='onestop_english', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='esl_context', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "onestop_english", "evaluation": {"accuracy": 0.6402116402116402}, "template_name": "esl_context"}, "esl_variation": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='onestop_english', debug=False, dtype='float16', max_length=2048, 
model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='esl_variation', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "onestop_english", "evaluation": {"accuracy": 0.5961199294532628}, "template_name": "esl_variation"}}, "poem_sentiment_None": {"guess_sentiment_without_options_variation_1": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='poem_sentiment', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='guess_sentiment_without_options_variation_1', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "poem_sentiment", "evaluation": {"accuracy": 0.21904761904761905}, "template_name": "guess_sentiment_without_options_variation_1"}, "most_appropriate_sentiment": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='poem_sentiment', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='most_appropriate_sentiment', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "poem_sentiment", "evaluation": {"accuracy": 0.29523809523809524}, "template_name": "most_appropriate_sentiment"}, "positive_or_negative_sentiment_variation_1": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='poem_sentiment', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='positive_or_negative_sentiment_variation_1', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "poem_sentiment", "evaluation": {"accuracy": 0.21904761904761905}, "template_name": "positive_or_negative_sentiment_variation_1"}, "positive_or_negative_sentiment_variation_2": {"arguments": "Namespace(config_name=None, 
dataset_config_name=None, dataset_name='poem_sentiment', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='positive_or_negative_sentiment_variation_2', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "poem_sentiment", "evaluation": {"accuracy": 0.22857142857142856}, "template_name": "positive_or_negative_sentiment_variation_2"}, "question_answer_format": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='poem_sentiment', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='question_answer_format', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "poem_sentiment", "evaluation": {"accuracy": 0.2571428571428571}, "template_name": "question_answer_format"}}, "pubmed_qa_pqa_labeled": {"Long Answer to Final Decision": {"arguments": "Namespace(config_name=None, dataset_config_name='pqa_labeled', dataset_name='pubmed_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='Long Answer to Final Decision', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "pqa_labeled", "dataset_name": "pubmed_qa", "evaluation": {"accuracy": 0.648}, "template_name": "Long Answer to Final Decision"}, "Question Answering (Short)": {"arguments": "Namespace(config_name=None, dataset_config_name='pqa_labeled', dataset_name='pubmed_qa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='Question Answering (Short)', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "pqa_labeled", "dataset_name": "pubmed_qa", "evaluation": {"accuracy": 0.695}, "template_name": "Question Answering (Short)"}}, "riddle_sense_None": 
{"answer_given_question_without_options": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='riddle_sense', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='answer_given_question_without_options', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "riddle_sense", "evaluation": {"accuracy": 0.48090107737512244}, "template_name": "answer_given_question_without_options"}, "most_suitable_answer": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='riddle_sense', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='most_suitable_answer', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "riddle_sense", "evaluation": {"accuracy": 0.40254652301665034}, "template_name": "most_suitable_answer"}, "question_answering": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='riddle_sense', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='question_answering', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "riddle_sense", "evaluation": {"accuracy": 0.3868756121449559}, "template_name": "question_answering"}, "question_to_answer_index": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='riddle_sense', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='question_to_answer_index', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "riddle_sense", "evaluation": {"accuracy": 0.3702252693437806}, "template_name": "question_to_answer_index"}}, "scicite_None": {"Classify intent": 
{"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='scicite', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Classify intent', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "scicite", "evaluation": {"accuracy": 0.20414847161572053}, "template_name": "Classify intent"}, "Classify intent (choices first)": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='scicite', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Classify intent (choices first)', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "scicite", "evaluation": {"accuracy": 0.21069868995633187}, "template_name": "Classify intent (choices first)"}, "Classify intent (select choice)": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='scicite', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Classify intent (select choice)', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "scicite", "evaluation": {"accuracy": 0.45414847161572053}, "template_name": "Classify intent (select choice)"}, "Classify intent w/section (select choice)": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='scicite', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='Classify intent w/section (select choice)', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "scicite", "evaluation": {"accuracy": 0.5032751091703057}, "template_name": "Classify intent w/section (select choice)"}, "can_describe": {"arguments": 
"Namespace(config_name=None, dataset_config_name=None, dataset_name='scicite', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='can_describe', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "scicite", "evaluation": {"accuracy": 0.34279475982532753}, "template_name": "can_describe"}}, "selqa_answer_selection_analysis": {"is-he-talking-about": {"arguments": "Namespace(config_name=None, dataset_config_name='answer_selection_analysis', dataset_name='selqa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='is-he-talking-about', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "answer_selection_analysis", "dataset_name": "selqa", "evaluation": {"accuracy": 0.9031847133757962}, "template_name": "is-he-talking-about"}, "make-sense-rand": {"arguments": "Namespace(config_name=None, dataset_config_name='answer_selection_analysis', dataset_name='selqa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='make-sense-rand', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "answer_selection_analysis", "dataset_name": "selqa", "evaluation": {"accuracy": 0.8815286624203822}, "template_name": "make-sense-rand"}, "which-answer-1st-vs-random": {"arguments": "Namespace(config_name=None, dataset_config_name='answer_selection_analysis', dataset_name='selqa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='which-answer-1st-vs-random', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "answer_selection_analysis", "dataset_name": "selqa", "evaluation": {"accuracy": 0.8726114649681529}, "template_name": "which-answer-1st-vs-random"}, "would-make-sense-qu-rand": 
{"arguments": "Namespace(config_name=None, dataset_config_name='answer_selection_analysis', dataset_name='selqa', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='validation', target_max_length=256, template_config_name=None, template_name='would-make-sense-qu-rand', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": "answer_selection_analysis", "dataset_name": "selqa", "evaluation": {"accuracy": 0.9121019108280255}, "template_name": "would-make-sense-qu-rand"}}, "snips_built_in_intents_None": {"categorize_query": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='snips_built_in_intents', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='categorize_query', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "snips_built_in_intents", "evaluation": {"accuracy": 0.39939024390243905}, "template_name": "categorize_query"}, "categorize_query_brief": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='snips_built_in_intents', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='categorize_query_brief', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "snips_built_in_intents", "evaluation": {"accuracy": 0.36585365853658536}, "template_name": "categorize_query_brief"}, "intent_query": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='snips_built_in_intents', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='intent_query', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "snips_built_in_intents", "evaluation": {"accuracy": 0.31097560975609756}, "template_name": "intent_query"}, "query_intent": {"arguments": 
"Namespace(config_name=None, dataset_config_name=None, dataset_name='snips_built_in_intents', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='query_intent', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "snips_built_in_intents", "evaluation": {"accuracy": 0.5823170731707317}, "template_name": "query_intent"}, "voice_intent": {"arguments": "Namespace(config_name=None, dataset_config_name=None, dataset_name='snips_built_in_intents', debug=False, dtype='float16', max_length=2048, model_name_or_path='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq', output_dir='/gpfsscratch/rech/six/commun/experiments/muennighoff/bloomckpt/6b3t0/tr13f-6b3-ml-t0-lmtoks341b-t0toks4b2-xp3capmixnewcodelonglossseq/evaluation', pad_to_max_length=False, per_device_eval_batch_size=8, prefixlm=False, split='train', target_max_length=256, template_config_name=None, template_name='voice_intent', tokenizer_name=None, use_slow_tokenizer=False)", "dataset_config_name": null, "dataset_name": "snips_built_in_intents", "evaluation": {"accuracy": 0.5762195121951219}, "template_name": "voice_intent"}}, "wmt14_fr_en_en-fr": {"a_good_translation-en-fr-source+target": {"bleu": 3.901997019133066, "bleu_stderr": 0.1547388039874437}, "a_good_translation-en-fr-target": {"bleu": 1.3934207960053382, "bleu_stderr": 0.08288490850849818}, "gpt3-en-fr": {"bleu": 0.08726814351547542, "bleu_stderr": 0.010429600640492285}, "version-en-fr-target": {"bleu": 4.126763289443808, "bleu_stderr": 0.14580464495476425}, "xglm-en-fr-target": {"bleu": 25.941476321250327, "bleu_stderr": 0.3907523684923812}}, "wmt14_fr_en_fr-en": {"a_good_translation-fr-en-source+target": {"bleu": 29.535567491027066, "bleu_stderr": 0.30494252895085205}, "a_good_translation-fr-en-target": {"bleu": 10.053995021986518, "bleu_stderr": 0.35757884053451566}, "gpt3-fr-en": {"bleu": 5.996411710924088, "bleu_stderr": 0.28548291384524305}, "version-fr-en-target": {"bleu": 25.433669341195383, "bleu_stderr": 0.3687499893706392}, "xglm-fr-en-target": {"bleu": 28.9915194963351, "bleu_stderr": 0.21848030246758274}}, "wmt14_hi_en_en-hi": {"a_good_translation-en-hi-source+target": {"bleu": 0.6990276538877561, "bleu_stderr": 0.08808237164901266}, "a_good_translation-en-hi-target": {"bleu": 0.18050206530453908, "bleu_stderr": 0.035780140427454685}, "gpt-3-en-hi-target": {"bleu": 2.984520737729336e-08, "bleu_stderr": 2.514575992012923e-08}, "version-en-hi-target": {"bleu": 0.7268866226269155, "bleu_stderr": 0.08613285628731362}, "xglm-en-hi-target": {"bleu": 6.785861030301621, "bleu_stderr": 0.1648705793132264}}, "wmt14_hi_en_hi-en": {"a_good_translation-hi-en-source+target": {"bleu": 15.724256465201472, "bleu_stderr": 0.28972186943927425}, "a_good_translation-hi-en-target": {"bleu": 6.515805969434861, "bleu_stderr": 0.16917094398914678}, "gpt-3-hi-en-target": {"bleu": 1.9706666216345307e-160, "bleu_stderr": 4.4302079675042133e-113}, "version-hi-en-target": {"bleu": 15.422032309127792, "bleu_stderr": 
0.2911447014743906}, "xglm-hi-en-target": {"bleu": 17.022583047573708, "bleu_stderr": 0.28125854531182937}}}