leaderboard/deployment/fastchat_xgen_fix.patch
Add FastChat xgen inference fix patch (Jae-Won Chung, commit a916f49)
--- a/fastchat/conversation.py
+++ b/fastchat/conversation.py
@@ -787,12 +787,12 @@ def get_conv_template(name: str) -> Conversation:
     Conversation(
         name="xgen",
         system_message="A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n",
-        roles=("### Human: ", "###"),
+        roles=("### Human", "### Assistant"),
         messages=(),
         offset=0,
-        sep_style=SeparatorStyle.NO_COLON_SINGLE,
+        sep_style=SeparatorStyle.ADD_COLON_SINGLE,
         sep="\n",
-        stop_token_ids=[50256, 0, 1, 2],
+        stop_token_ids=[50256],
         stop_str="<|endoftext|>",
     )
 )
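
For reference, a minimal sketch of how the patched "xgen" template renders a prompt, assuming a FastChat install with this patch applied (the question text and the exact whitespace shown in the comments are illustrative, not taken from the patch):

# Render a prompt with the patched "xgen" conversation template.
from fastchat.conversation import get_conv_template

conv = get_conv_template("xgen")
conv.append_message(conv.roles[0], "What is the capital of France?")
conv.append_message(conv.roles[1], None)  # leave the assistant turn open

# With roles ("### Human", "### Assistant") and ADD_COLON_SINGLE, the
# rendered prompt looks roughly like:
#   <system message>
#   ### Human: What is the capital of France?
#   ### Assistant:
# Generation should then stop on token id 50256 (<|endoftext|>).
print(conv.get_prompt())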