def set_cora_preset():
    """Preset for the CORA multilingual QA generator (gsarti/cora_mgen)."""
    return (
        "gsarti/cora_mgen",  # model_name_or_path
        "<Q>:{current}\n<P>:{context}",  # input_template
        "<Q>:{current}",  # input_current_text_template
    )


def set_default_preset():
    """Default preset covering every configurable field."""
    return (
        "gpt2",  # model_name_or_path
        "{current} {context}",  # input_template
        "{current}",  # input_current_text_template
        "{current}",  # output_template
        [],  # special_tokens_to_keep
        "",  # decoder_input_output_separator
        "{}",  # model_kwargs
        "{}",  # tokenizer_kwargs
        "{}",  # generation_kwargs
        "{}",  # attribution_kwargs
    )


def set_zephyr_preset():
    """Preset for StableLM 2 Zephyr 1.6B using the Zephyr chat template."""
    return (
        "stabilityai/stablelm-2-zephyr-1_6b",  # model_name_or_path
        "<|system|>\n{context}\n<|user|>\n{current}\n<|assistant|>\n",  # input_template
        "<|user|>\n{current}\n<|assistant|>\n",  # input_current_text_template
        "\n",  # decoder_input_output_separator
    )


def set_chatml_preset():
    """Preset for Qwen1.5-0.5B-Chat using the ChatML prompt format."""
    return (
        "Qwen/Qwen1.5-0.5B-Chat",  # model_name_or_path
        "<|im_start|>system\n{context}<|im_end|>\n<|im_start|>user\n{current}<|im_end|>\n<|im_start|>assistant\n",  # input_template
        "<|im_start|>user\n{current}<|im_end|>\n<|im_start|>assistant\n",  # input_current_text_template
        "",  # decoder_input_output_separator
        ["<|im_start|>", "<|im_end|>"],  # special_tokens_to_keep
    )


def set_mmt_preset():
    """Preset for multilingual MT with mBART-50 one-to-many (English to French)."""
    return (
        "facebook/mbart-large-50-one-to-many-mmt",  # model_name_or_path
        "{context} {current}",  # input_template
        "{context} {current}",  # output_template
        '{\n\t"src_lang": "en_XX",\n\t"tgt_lang": "fr_XX"\n}',  # tokenizer_kwargs
    )


def set_towerinstruct_preset():
    """Preset for TowerInstruct-7B with a context-aware English-to-French translation prompt."""
    return (
        "Unbabel/TowerInstruct-7B-v0.1",  # model_name_or_path
        "<|im_start|>user\nSource: {current}\nContext: {context}\nTranslate the above text into French. Use the context to guide your answer.\nTarget:<|im_end|>\n<|im_start|>assistant\n",  # input_template
        "<|im_start|>user\nSource: {current}\nTranslate the above text into French.\nTarget:<|im_end|>\n<|im_start|>assistant\n",  # input_current_text_template
        "",  # decoder_input_output_separator
    )