Text2Text Generation
Transformers
Safetensors
101 languages
t5
Inference Endpoints
text-generation-inference

JSONDecodeError: Expecting value: line 1 column 1 (char 0)

#10
by GokhanAI - opened

JSONDecodeError Traceback (most recent call last)
Cell In[9], line 6
3 path = "/opt/GPT/MODEL/aya-101/"
5 tokenizer = AutoTokenizer.from_pretrained(path)
----> 6 model = AutoModelForSeq2SeqLM.from_pretrained(path)

File /opt/anaconda3/envs/gpt/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:566, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
564 elif type(config) in cls._model_mapping.keys():
565 model_class = _get_model_class(config, cls._model_mapping)
--> 566 return model_class.from_pretrained(
567 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
568 )
569 raise ValueError(
570 f"Unrecognized configuration class {config.__class__} for this kind of AutoModel: {cls.__name__}.\n"
571 f"Model type should be one of {', '.join(c.__name__ for c in cls._model_mapping.keys())}."
572 )

File /opt/anaconda3/envs/gpt/lib/python3.10/site-packages/transformers/modeling_utils.py:3485, in PreTrainedModel.from_pretrained(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, *model_args, **kwargs)
3482 # We'll need to download and cache each checkpoint shard if the checkpoint is sharded.
3483 if is_sharded:
3484 # rsolved_archive_file becomes a list of files that point to the different checkpoint shards in this case.
-> 3485 resolved_archive_file, sharded_metadata = get_checkpoint_shard_files(
3486 pretrained_model_name_or_path,
3487 resolved_archive_file,
3488 cache_dir=cache_dir,
3489 force_download=force_download,
3490 proxies=proxies,
3491 resume_download=resume_download,
3492 local_files_only=local_files_only,
3493 token=token,
3494 user_agent=user_agent,
3495 revision=revision,
3496 subfolder=subfolder,
3497 _commit_hash=commit_hash,
3498 )
3500 if (
3501 is_safetensors_available()
3502 and isinstance(resolved_archive_file, str)
3503 and resolved_archive_file.endswith(".safetensors")
3504 ):
3505 with safe_open(resolved_archive_file, framework="pt") as f:

File /opt/anaconda3/envs/gpt/lib/python3.10/site-packages/transformers/utils/hub.py:1002, in get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, cache_dir, force_download, proxies, resume_download, local_files_only, token, user_agent, revision, subfolder, _commit_hash, **deprecated_kwargs)
999 raise ValueError(f"Can't find a checkpoint index ({index_filename}) in {pretrained_model_name_or_path}.")
1001 with open(index_filename, "r") as f:
-> 1002 index = json.loads(f.read())
1004 shard_filenames = sorted(set(index["weight_map"].values()))
1005 sharded_metadata = index["metadata"]

File /opt/anaconda3/envs/gpt/lib/python3.10/json/__init__.py:346, in loads(s, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
341 s = s.decode(detect_encoding(s), 'surrogatepass')
343 if (cls is None and object_hook is None and
344 parse_int is None and parse_float is None and
345 parse_constant is None and object_pairs_hook is None and not kw):
--> 346 return _default_decoder.decode(s)
347 if cls is None:
348 cls = JSONDecoder

File /opt/anaconda3/envs/gpt/lib/python3.10/json/decoder.py:337, in JSONDecoder.decode(self, s, _w)
332 def decode(self, s, _w=WHITESPACE.match):
333 """Return the Python representation of s (a str instance
334 containing a JSON document).
335
336 """
--> 337 obj, end = self.raw_decode(s, idx=_w(s, 0).end())
338 end = _w(s, end).end()
339 if end != len(s):

File /opt/anaconda3/envs/gpt/lib/python3.10/json/decoder.py:355, in JSONDecoder.raw_decode(self, s, idx)
353 obj, end = self.scan_once(s, idx)
354 except StopIteration as err:
--> 355 raise JSONDecodeError("Expecting value", s, err.value) from None
356 return obj, end

JSONDecodeError: Expecting value: line 1 column 1 (char 0)

Cohere For AI org

Could you provide a bit more detail on what code you ran, and the example you used (if any)?

I had the same error, and it seems the file "model.safetensors.index.json" is the cause. When you clone the repository with git, this file is, for some reason, not cloned properly (it ends up empty or corrupted, which is why `json.loads` fails at line 1, column 1).

To solve this, simply delete the file "model.safetensors.index.json" from your local model folder, then go to the model's "Files" page on Hugging Face, download that file separately by clicking its download button, and move it into the model folder.

Wow, Afalqannas, that was very helpful. IT WORKED!

Cohere For AI org

closing this issue as it seems to be resolved.

shivi changed discussion status to closed

Sign up or log in to comment