Runtime error

Traceback (most recent call last):
  File "/home/user/app/app.py", line 14, in <module>
    tokenizer = AutoTokenizer.from_pretrained(REPO, trust_remote_code=True)
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 738, in from_pretrained
    return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2045, in from_pretrained
    return cls._from_pretrained(
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2256, in _from_pretrained
    tokenizer = cls(*init_inputs, **init_kwargs)
  File "/home/user/.cache/huggingface/modules/transformers_modules/replit/replit-code-v1-3b/cc0a4f17a8d72b71d62ea53cb0e23e4dac352067/replit_lm_tokenizer.py", line 66, in __init__
    super().__init__(bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, pad_token=pad_token, sep_token=sep_token, sp_model_kwargs=self.sp_model_kwargs, **kwargs)
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/tokenization_utils.py", line 366, in __init__
    self._add_tokens(self.all_special_tokens_extended, special_tokens=True)
  File "/home/user/.pyenv/versions/3.10.13/lib/python3.10/site-packages/transformers/tokenization_utils.py", line 462, in _add_tokens
    current_vocab = self.get_vocab().copy()
  File "/home/user/.cache/huggingface/modules/transformers_modules/replit/replit-code-v1-3b/cc0a4f17a8d72b71d62ea53cb0e23e4dac352067/replit_lm_tokenizer.py", line 76, in get_vocab
    vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)}
  File "/home/user/.cache/huggingface/modules/transformers_modules/replit/replit-code-v1-3b/cc0a4f17a8d72b71d62ea53cb0e23e4dac352067/replit_lm_tokenizer.py", line 73, in vocab_size
    return self.sp_model.get_piece_size()
AttributeError: 'ReplitLMTokenizer' object has no attribute 'sp_model'
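What the traceback shows: recent transformers releases (the slow-tokenizer refactor around v4.34) call self._add_tokens(), and therefore get_vocab(), from inside PreTrainedTokenizer.__init__. The Replit remote-code tokenizer only assigns self.sp_model after its super().__init__() call, so get_vocab() runs before the SentencePiece model exists and raises the AttributeError above. Below is a minimal sketch of the usual fix, assuming the tokenizer follows the standard SentencePiece pattern; the vocab_file argument and method bodies here are illustrative, not the exact remote code. The key change is constructing self.sp_model before delegating to the base class.

# Sketch of a patched tokenizer __init__ ordering (illustrative, not the
# exact replit_lm_tokenizer.py code).
import sentencepiece as spm
from transformers import PreTrainedTokenizer


class ReplitLMTokenizer(PreTrainedTokenizer):
    def __init__(self, vocab_file, bos_token=None, eos_token="<|endoftext|>",
                 unk_token="<|unk|>", pad_token="<|pad|>", sep_token=None,
                 sp_model_kwargs=None, **kwargs):
        self.sp_model_kwargs = sp_model_kwargs or {}
        # Load the SentencePiece model BEFORE calling super().__init__():
        # in transformers >= 4.34 the base initializer calls get_vocab(),
        # which needs self.sp_model to exist already.
        self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs)
        self.sp_model.Load(vocab_file)
        super().__init__(bos_token=bos_token, eos_token=eos_token,
                         unk_token=unk_token, pad_token=pad_token,
                         sep_token=sep_token,
                         sp_model_kwargs=self.sp_model_kwargs, **kwargs)

    @property
    def vocab_size(self):
        return self.sp_model.get_piece_size()

    def get_vocab(self):
        # Works during base-class init because sp_model is set above.
        return {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)}

    def _convert_token_to_id(self, token):
        return self.sp_model.piece_to_id(token)

    def _convert_id_to_token(self, index):
        return self.sp_model.id_to_piece(index)

Alternatively, pinning transformers to a release that predates the initialization-order change (e.g. transformers<4.34 in requirements.txt) avoids the crash without modifying the remote code.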
