support chat_stream
modeling_qwen.py +1 -1
modeling_qwen.py CHANGED
@@ -1026,7 +1026,7 @@ class QWenLMHeadModel(QWenPreTrainedModel):
             seed=-1,
             **kwargs):
         outputs.append(token.item())
-        yield tokenizer.decode(outputs, skip_special_tokens=True, errors='ignore')
+        yield tokenizer.decode(outputs, skip_special_tokens=True, errors='ignore', keep_image_special=True)
 
     return stream_generator()
 
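For context, chat_stream returns a generator that yields the text decoded so far after each generated token, and the added keep_image_special=True flag presumably keeps image placeholder tokens from being stripped out of the streamed text. Below is a minimal sketch of how such a stream is typically consumed; the repo id "Qwen/Qwen-VL-Chat" and the trust_remote_code loading pattern are illustrative assumptions, not part of this commit.

# Minimal sketch of consuming chat_stream (assumed usage, not part of this commit).
# The repo id "Qwen/Qwen-VL-Chat" and the loading pattern are illustrative assumptions.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-VL-Chat", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen-VL-Chat", device_map="auto", trust_remote_code=True
).eval()

# chat_stream yields the full decoded response so far, so print only the new suffix.
printed = ""
for partial in model.chat_stream(tokenizer, "Describe this image.", history=None):
    print(partial[len(printed):], end="", flush=True)
    printed = partial
print()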