zxdu20 ShiJueXiaofei committed
Commit efb7a1e (1 parent: a8c811e)

fix garbled inference output when use_cache = False (#85)


- fix garbled inference output when use_cache = False (2cdf703899afca945a221ece195758e33d9c433d)


Co-authored-by: YuanXiaofei <ShiJueXiaofei@users.noreply.huggingface.co>

Files changed (1)
  1. modeling_chatglm.py +3 -2
modeling_chatglm.py CHANGED
@@ -904,8 +904,9 @@ class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel):
         if position_ids is None:
             position_ids = self.get_position_ids(input_ids, device=input_ids.device)
         if not is_first_forward:
-            position_ids = position_ids[..., -1:]
-            input_ids = input_ids[:, -1:]
+            if self.config.use_cache:
+                position_ids = position_ids[..., -1:]
+                input_ids = input_ids[:, -1:]
         return {
             "input_ids": input_ids,
             "past_key_values": past_key_values,