duzx16 committed
Commit bba4277
1 parent: fc3235f

Fix type hint

Files changed (1): modeling_chatglm.py (+2 -2)
modeling_chatglm.py CHANGED
@@ -1018,7 +1018,7 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
         return content, history
 
     @torch.inference_mode()
-    def chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user",
+    def chat(self, tokenizer, query: str, history: List[Dict] = None, role: str = "user",
              max_length: int = 8192, num_beams=1, do_sample=True, top_p=0.8, temperature=0.8, logits_processor=None,
              **kwargs):
         if history is None:
@@ -1040,7 +1040,7 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
         return response, history
 
     @torch.inference_mode()
-    def stream_chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user",
+    def stream_chat(self, tokenizer, query: str, history: List[Dict] = None, role: str = "user",
                     past_key_values=None,max_length: int = 8192, do_sample=True, top_p=0.8, temperature=0.8,
                     logits_processor=None, return_past_key_values=False, **kwargs):
         if history is None:
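For context, the corrected hint reflects that `chat` and `stream_chat` track conversation history as a list of dicts rather than (query, response) tuples. Below is a minimal usage sketch; the repo id and the exact shape of the history entries are assumptions for illustration, not something this commit specifies.

# Minimal usage sketch (assumed repo id; .half().cuda() assumes a CUDA GPU is available).
from transformers import AutoModel, AutoTokenizer

repo_id = "THUDM/chatglm3-6b"  # assumption: any checkpoint that ships this modeling_chatglm.py
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True).half().cuda().eval()

# chat(): history is a List[Dict], e.g.
# [{"role": "user", "content": "Hello"}, {"role": "assistant", "content": "..."}]  (assumed schema)
response, history = model.chat(tokenizer, "Hello", history=[], role="user")

# stream_chat(): a generator yielding the partial response and the updated dict-based history.
for response, history in model.stream_chat(tokenizer, "Tell me more", history=history):
    pass  # `response` grows incrementally; print or stream it as needed
print(response)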