fix for transformers>4.33

#11
Files changed (1)
  1. tokenization_chatglm.py +12 -12
tokenization_chatglm.py CHANGED
@@ -193,6 +193,18 @@ class ChatGLMTokenizer(PreTrainedTokenizer):
         num_image_tokens=20000,
         **kwargs
     ) -> None:
+        self.do_lower_case = do_lower_case
+        self.remove_space = remove_space
+        self.vocab_file = vocab_file
+
+        self.bos_token = bos_token
+        self.eos_token = eos_token
+        self.end_token = end_token
+        self.mask_token = mask_token
+        self.gmask_token = gmask_token
+
+        self.sp_tokenizer = SPTokenizer(vocab_file, num_image_tokens=num_image_tokens)
+
         super().__init__(
             do_lower_case=do_lower_case,
             remove_space=remove_space,
@@ -208,18 +220,6 @@ class ChatGLMTokenizer(PreTrainedTokenizer):
             **kwargs
         )
 
-        self.do_lower_case = do_lower_case
-        self.remove_space = remove_space
-        self.vocab_file = vocab_file
-
-        self.bos_token = bos_token
-        self.eos_token = eos_token
-        self.end_token = end_token
-        self.mask_token = mask_token
-        self.gmask_token = gmask_token
-
-        self.sp_tokenizer = SPTokenizer(vocab_file, num_image_tokens=num_image_tokens)
-
         """ Initialisation """
 
     @property
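The change only reorders the constructor: the attribute assignments (including self.sp_tokenizer) now run before super().__init__(), which in transformers > 4.33 touches the vocabulary during construction and therefore needs sp_tokenizer to already exist. A minimal smoke test, assuming the patched file is loaded through the Hub with trust_remote_code=True; the model id below is an illustrative assumption, not taken from this diff:

# Sketch: confirm the patched tokenizer constructs under transformers > 4.33.
# "THUDM/chatglm-6b" is an assumed repo id for illustration; point it at the
# checkpoint or local path that actually contains this tokenization_chatglm.py.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "THUDM/chatglm-6b",      # assumed repo id; adjust to your checkpoint
    trust_remote_code=True,  # load the repo's custom tokenization_chatglm.py
)

ids = tokenizer("Hello, ChatGLM")["input_ids"]
print(tokenizer.decode(ids))

If the attributes were still assigned after super().__init__(), this construction would fail on transformers > 4.33 because the base class accesses the vocabulary before sp_tokenizer is set.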