Fix garbled output (乱码) during inference when use_cache = False

#85
Files changed (1): modeling_chatglm.py (+3, -2)
modeling_chatglm.py CHANGED
@@ -904,8 +904,9 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
904
  if position_ids is None:
905
  position_ids = self.get_position_ids(input_ids, device=input_ids.device)
906
  if not is_first_forward:
907
- position_ids = position_ids[..., -1:]
908
- input_ids = input_ids[:, -1:]
 
909
  return {
910
  "input_ids": input_ids,
911
  "past_key_values": past_key_values,
 
904
  if position_ids is None:
905
  position_ids = self.get_position_ids(input_ids, device=input_ids.device)
906
  if not is_first_forward:
907
+ if self.config.use_cache:
908
+ position_ids = position_ids[..., -1:]
909
+ input_ids = input_ids[:, -1:]
910
  return {
911
  "input_ids": input_ids,
912
  "past_key_values": past_key_values,