bapatra and damajercakms committed
Commit e6adf2a (parent: f5527db)

Update tokenization_phi3_small.py (#18)


- Update tokenization_phi3_small.py (3558f70d46aadc019146e8744232c2374234b8c1)


Co-authored-by: David Majercak <damajercakms@users.noreply.huggingface.co>

Files changed (1)
  1. tokenization_phi3_small.py +4 -6
tokenization_phi3_small.py CHANGED
@@ -180,12 +180,10 @@ class Phi3SmallTokenizer(PreTrainedTokenizer):
         # First try to load from the tokenization config if it exists
         tokenization_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
         if tokenization_config:
-            cls_kwargs.update(
-                dict(
-                    model_max_length=tokenization_config["model_max_length"],
-                    chat_template=tokenization_config.get("chat_template", None)
-                )
-            )
+            cls_kwargs = {
+                **tokenization_config,
+                **cls_kwargs
+            }
         else:
             config = AutoConfig.from_pretrained(pretrained_model_name_or_path, trust_remote_code=True)
             cls_kwargs["model_max_length"] = config.max_position_embeddings
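
The practical effect of the change: instead of copying only `model_max_length` and `chat_template` out of the tokenization config (and raising a `KeyError` if `model_max_length` was absent), the new code merges the entire tokenization config into `cls_kwargs`, with explicitly passed keyword arguments taking precedence because they are unpacked last in the dict literal. A minimal sketch of that merge semantics, with illustrative values that are not the model's real config:

```python
# Sketch of the dict-merge precedence used in the new code.
# All values here are made up for illustration.
tokenization_config = {
    "model_max_length": 8192,
    "chat_template": "{{ messages }}",
    "bos_token": "<|endoftext|>",
}
cls_kwargs = {"model_max_length": 4096}  # e.g. supplied by the caller

# Later unpackings overwrite earlier ones, so caller kwargs win.
merged = {**tokenization_config, **cls_kwargs}

assert merged["model_max_length"] == 4096            # caller override preserved
assert merged["chat_template"] == "{{ messages }}"   # carried over from the config
assert merged["bos_token"] == "<|endoftext|>"        # kept; the old code dropped it
```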