Does not work

#1
by beratersari - opened

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("comidan/llama-3-chat-multilingual-v1-8b")
model = AutoModelForCausalLM.from_pretrained("comidan/llama-3-chat-multilingual-v1-8b")

OSError Traceback (most recent call last)
Cell In[52], line 3
1 from transformers import AutoTokenizer, AutoModelForCausalLM
----> 3 tokenizer = AutoTokenizer.from_pretrained("comidan/llama-3-chat-multilingual-v1-8b")
4 model = AutoModelForCausalLM.from_pretrained("comidan/llama-3-chat-multilingual-v1-8b")

File /opt/conda/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py:899, in AutoTokenizer.from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs)
896 tokenizer_class_py, tokenizer_class_fast = TOKENIZER_MAPPING[type(config)]
898 if tokenizer_class_fast and (use_fast or tokenizer_class_py is None):
--> 899 return tokenizer_class_fast.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
900 else:
901 if tokenizer_class_py is not None:

File /opt/conda/lib/python3.10/site-packages/transformers/tokenization_utils_base.py:2095, in PreTrainedTokenizerBase.from_pretrained(cls, pretrained_model_name_or_path, cache_dir, force_download, local_files_only, token, revision, trust_remote_code, *init_inputs, **kwargs)
2092 # If one passes a GGUF file path to gguf_file there is no need for this check as the tokenizer will be
2093 # loaded directly from the GGUF file.
2094 if all(full_file_name is None for full_file_name in resolved_vocab_files.values()) and not gguf_file:
-> 2095 raise EnvironmentError(
2096 f"Can't load tokenizer for '{pretrained_model_name_or_path}'. If you were trying to load it from "
2097 "'https://huggingface.co/models', make sure you don't have a local directory with the same name. "
2098 f"Otherwise, make sure '{pretrained_model_name_or_path}' is the correct path to a directory "
2099 f"containing all relevant files for a {cls.__name__} tokenizer."
2100 )
2102 for file_id, file_path in vocab_files.items():
2103 if file_id not in resolved_vocab_files:

OSError: Can't load tokenizer for 'comidan/llama-3-chat-multilingual-v1-8b'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure 'comidan/llama-3-chat-multilingual-v1-8b' is the correct path to a directory containing all relevant files for a LlamaTokenizerFast tokenizer.
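The error indicates that the repo does not contain the tokenizer files (e.g. tokenizer.json / tokenizer_config.json), so AutoTokenizer has nothing to load. A possible workaround, assuming this model is a fine-tune of meta-llama/Meta-Llama-3-8B-Instruct (not confirmed by the model card), is to load a compatible tokenizer from the base model and only the weights from this repo:

from transformers import AutoTokenizer, AutoModelForCausalLM

# Assumption: the base model's tokenizer is compatible with this fine-tune.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")
model = AutoModelForCausalLM.from_pretrained("comidan/llama-3-chat-multilingual-v1-8b")

Otherwise the tokenizer files would need to be added to the repo itself.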
