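# Model presets for the context attribution demo. Each function returns a tuple
# of values (model identifier, prompt templates, and related options) that
# presumably populate the corresponding UI fields when a preset is selected;
# the field descriptions in the comments below are inferred from the values themselves.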
def set_cora_preset():
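    # CORA multilingual QA generator (mGEN): "<Q>: question <P>: passage" prompt.
    # Fields are likely (model id, input template with context, contextless input template).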
    return (
        "gsarti/cora_mgen",
        "<Q>:{current} <P>:{context}",
        "<Q>:{current}",
    )


def set_default_preset():
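    # Fallback preset: plain GPT-2 with bare whitespace-joined templates. The
    # remaining fields are likely the contextless templates, special tokens to
    # keep, the decoder input/output separator, and JSON strings for model,
    # tokenizer, generation and attribution kwargs, all left at their defaults.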
    return (
        "gpt2",
        "{current} {context}",
        "{current}",
        "{current}",
        [],
        "",
        "{}",
        "{}",
        "{}",
        "{}",
    )


def set_zephyr_preset():
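    # StableLM 2 Zephyr 1.6B chat preset: the context goes into the system turn.
    # The trailing "\n" is presumably the decoder input/output separator.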
    return (
        "stabilityai/stablelm-2-zephyr-1_6b",
        "<|system|>\n{context}</s>\n<|user|>\n{current}</s>\n<|assistant|>\n",
        "<|user|>\n{current}</s>\n<|assistant|>\n",
        "\n",
    )


def set_chatml_preset():
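    # ChatML-style preset (Qwen1.5 0.5B Chat): context in the system turn, empty
    # separator, and the ChatML delimiters listed as special tokens to keep.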
    return (
        "Qwen/Qwen1.5-0.5B-Chat",
        "<|im_start|>system\n{context}<|im_end|>\n<|im_start|>user\n{current}<|im_end|>\n<|im_start|>assistant\n",
        "<|im_start|>user\n{current}<|im_end|>\n<|im_start|>assistant\n",
        "",
        ["<|im_start|>", "<|im_end|>"],
    )


def set_mmt_preset():
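    # Multilingual MT preset (mBART-50 one-to-many): the context is simply
    # prepended to the current sentence on both source and target side, and the
    # JSON string presumably sets tokenizer src/tgt language codes (en_XX -> fr_XX).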
    return (
        "facebook/mbart-large-50-one-to-many-mmt",
        "{context} {current}",
        "{context} {current}",
        '{\n\t"src_lang": "en_XX",\n\t"tgt_lang": "fr_XX"\n}',
    )


def set_towerinstruct_preset():
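    # TowerInstruct 7B translation preset: ChatML-style prompt asking for an
    # English-to-French translation, with and without the document context.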
    return (
        "Unbabel/TowerInstruct-7B-v0.1",
        "<|im_start|>user\nSource: {current}\nContext: {context}\nTranslate the above text into French. Use the context to guide your answer.\nTarget:<|im_end|>\n<|im_start|>assistant\n",
        "<|im_start|>user\nSource: {current}\nTranslate the above text into French.\nTarget:<|im_end|>\n<|im_start|>assistant\n",
        "",
        ["<|im_start|>", "<|im_end|>"],
    )


def set_gemma_preset():
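    # Gemma 2B instruction-tuned preset: Gemma's chat format has no system role,
    # so the context is prepended to the user turn; turn delimiters are kept as
    # special tokens.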
    return (
        "google/gemma-2b-it",
        "<start_of_turn>user\n{context}\n{current}<end_of_turn>\n<start_of_turn>model\n",
        "<start_of_turn>user\n{current}<end_of_turn>\n<start_of_turn>model\n",
        "",
        ["<start_of_turn>", "<end_of_turn>"],
    )