
The CORD dataset is cloned from the clovaai GitHub repo. The labels listed below are replaced:
```python
replacing_labels = ['menu.etc', 'menu.itemsubtotal',
                    'menu.sub_etc', 'menu.sub_unitprice',
                    'menu.vatyn', 'void_menu.nm',
                    'void_menu.price', 'sub_total.othersvc_price']
```
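As a rough illustration only (not this repo's actual preprocessing script), labels in this list could be mapped to a generic `other` class when preparing token-classification annotations. The `normalize_label` helper and the `other` target below are assumptions made for this sketch:

```python
# Hypothetical sketch: collapse the replaced labels into a generic "other" class.
# The "other" target and this helper are assumptions, not the card's documented
# preprocessing.
replacing_labels = ['menu.etc', 'menu.itemsubtotal',
                    'menu.sub_etc', 'menu.sub_unitprice',
                    'menu.vatyn', 'void_menu.nm',
                    'void_menu.price', 'sub_total.othersvc_price']

def normalize_label(label: str) -> str:
    """Return 'other' for labels in replacing_labels, otherwise the label unchanged."""
    return 'other' if label in replacing_labels else label

print(normalize_label('menu.vatyn'))   # -> 'other'
print(normalize_label('menu.price'))   # -> 'menu.price'
```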
Check Sparrow for more info.
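A minimal sketch of pulling the data with the 🤗 `datasets` library, assuming the default configuration and that a regular (non-streaming) download works for this repo; depending on your `datasets` version, `trust_remote_code=True` may be required because the repo ships a loading script:

```python
from datasets import load_dataset

# Minimal sketch: load katanaml/cord from the Hugging Face Hub.
# Assumes the default configuration; a "train" split is assumed to exist.
dataset = load_dataset("katanaml/cord")

print(dataset)                       # shows the available splits and their sizes
print(dataset["train"][0].keys())    # inspect the fields of one example
```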
If you use the CORD dataset, please cite:

```bibtex
@article{park2019cord,
  title={CORD: A Consolidated Receipt Dataset for Post-OCR Parsing},
  author={Park, Seunghyun and Shin, Seung and Lee, Bado and Lee, Junyeop and Surh, Jaeheung and Seo, Minjoon and Lee, Hwalsuk},
  booktitle={Document Intelligence Workshop at Neural Information Processing Systems},
  year={2019}
}

@article{hwang2019post,
  title={Post-OCR parsing: building simple and robust parser via BIO tagging},
  author={Hwang, Wonseok and Kim, Seonghyeon and Yim, Jinyeong and Seo, Minjoon and Park, Seunghyun and Park, Sungrae and Lee, Junyeop and Lee, Bado and Lee, Hwalsuk},
  booktitle={Document Intelligence Workshop at Neural Information Processing Systems},
  year={2019}
}
```