duzx16 committed
Commit: a1170c5
Parent(s): 22f6409
Fix invalid token returned by stream_chat

modeling_chatglm.py CHANGED (+7 -6)
@@ -979,12 +979,13 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
                 outputs, past_key_values = outputs
             outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):]
             response = tokenizer.decode(outputs)
-            response = self.process_response(response)
-            new_history = history + [(query, response)]
-            if return_past_key_values:
-                yield response, new_history, past_key_values
-            else:
-                yield response, new_history
+            if response and response[-1] != "�":
+                response = self.process_response(response)
+                new_history = history + [(query, response)]
+                if return_past_key_values:
+                    yield response, new_history, past_key_values
+                else:
+                    yield response, new_history
 
     @torch.no_grad()
     def stream_generate(
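The guard added here drops a partially decoded character at the end of the streamed text: when the incremental decode stops in the middle of a multi-byte UTF-8 sequence, the decoder emits the replacement character U+FFFD ("�"), and yielding that would hand an invalid token to callers of stream_chat. A minimal sketch of the failure mode, using plain bytes.decode as a hypothetical stand-in for tokenizer.decode:

# Sketch of the failure mode the commit guards against; bytes.decode
# stands in for tokenizer.decode. Cutting a UTF-8 stream mid-character
# produces the replacement character U+FFFD ("�").
data = "你好".encode("utf-8")   # 6 bytes, 3 per character
partial = data[:4]              # cut inside the second character

response = partial.decode("utf-8", errors="replace")
print(response)                 # -> '你�'

# The commit's check: only yield once the tail is a complete character.
if response and response[-1] != "�":
    print("safe to yield:", response)
else:
    print("incomplete tail, wait for the next token")

On the next iteration of the streaming loop, the remaining bytes of the character arrive, the decode no longer ends in "�", and the response is yielded as before.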