will33am committed on
Commit
7c26d90
1 Parent(s): 52d3711

fix path pkl

Browse files
.ipynb_checkpoints/AVA-checkpoint.py CHANGED
@@ -50,7 +50,7 @@ class AVA(datasets.GeneratorBasedBuilder):
50
 
51
  def _generate_examples(self, archives, split):
52
  """Yields examples."""
53
- DICT_METADATA = Path(dl_manager.download_and_extract(_BASE_HF_URL)) / "metadata.pkl")
54
  idx = 0
55
  for archive in archives:
56
  for path, file in archive:
 
50
 
51
  def _generate_examples(self, archives, split):
52
  """Yields examples."""
53
+ DICT_METADATA = Path(dl_manager.download_and_extract(_BASE_HF_URL)) / "metadata.pkl"
54
  idx = 0
55
  for archive in archives:
56
  for path, file in archive:
AVA.py CHANGED
@@ -50,7 +50,7 @@ class AVA(datasets.GeneratorBasedBuilder):
50
 
51
  def _generate_examples(self, archives, split):
52
  """Yields examples."""
53
- DICT_METADATA = Path(dl_manager.download_and_extract(_BASE_HF_URL)) / "metadata.pkl")
54
  idx = 0
55
  for archive in archives:
56
  for path, file in archive:
 
50
 
51
  def _generate_examples(self, archives, split):
52
  """Yields examples."""
53
+ DICT_METADATA = Path(dl_manager.download_and_extract(_BASE_HF_URL)) / "metadata.pkl"
54
  idx = 0
55
  for archive in archives:
56
  for path, file in archive:
notebooks/Test.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
  "id": "aef315bf",
7
  "metadata": {},
8
  "outputs": [],
@@ -12,45 +12,66 @@
12
  },
13
  {
14
  "cell_type": "code",
15
- "execution_count": 2,
16
  "id": "c0ed6498",
17
  "metadata": {},
18
  "outputs": [
19
  {
20
  "data": {
21
  "application/vnd.jupyter.widget-view+json": {
22
- "model_id": "ea63e16347094b7a98bb795cd5eb74fa",
23
  "version_major": 2,
24
  "version_minor": 0
25
  },
26
  "text/plain": [
27
- "Downloading builder script: 0%| | 0.00/2.08k [00:00<?, ?B/s]"
28
  ]
29
  },
30
  "metadata": {},
31
  "output_type": "display_data"
32
  },
33
  {
34
- "ename": "FileNotFoundError",
35
- "evalue": "[Errno 2] No such file or directory: 'data/metadata.pkl'",
36
- "output_type": "error",
37
- "traceback": [
38
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
39
- "\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)",
40
- "File \u001b[0;32m<timed exec>:1\u001b[0m\n",
41
- "File \u001b[0;32m/opt/conda/envs/hugginface/lib/python3.8/site-packages/datasets/load.py:1734\u001b[0m, in \u001b[0;36mload_dataset\u001b[0;34m(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, num_proc, **config_kwargs)\u001b[0m\n\u001b[1;32m 1731\u001b[0m ignore_verifications \u001b[38;5;241m=\u001b[39m ignore_verifications \u001b[38;5;129;01mor\u001b[39;00m save_infos\n\u001b[1;32m 1733\u001b[0m \u001b[38;5;66;03m# Create a dataset builder\u001b[39;00m\n\u001b[0;32m-> 1734\u001b[0m builder_instance \u001b[38;5;241m=\u001b[39m \u001b[43mload_dataset_builder\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1735\u001b[0m \u001b[43m \u001b[49m\u001b[43mpath\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1736\u001b[0m \u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1737\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1738\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata_files\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata_files\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1739\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1740\u001b[0m \u001b[43m \u001b[49m\u001b[43mfeatures\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfeatures\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1741\u001b[0m \u001b[43m \u001b[49m\u001b[43mdownload_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdownload_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1742\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdownload_mode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdownload_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1743\u001b[0m \u001b[43m \u001b[49m\u001b[43mrevision\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrevision\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1744\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_auth_token\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_auth_token\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1745\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mconfig_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1746\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;66;03m# Return iterable dataset in case of streaming\u001b[39;00m\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m streaming:\n",
42
- "File \u001b[0;32m/opt/conda/envs/hugginface/lib/python3.8/site-packages/datasets/load.py:1502\u001b[0m, in \u001b[0;36mload_dataset_builder\u001b[0;34m(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, use_auth_token, **config_kwargs)\u001b[0m\n\u001b[1;32m 1492\u001b[0m dataset_module \u001b[38;5;241m=\u001b[39m dataset_module_factory(\n\u001b[1;32m 1493\u001b[0m path,\n\u001b[1;32m 1494\u001b[0m revision\u001b[38;5;241m=\u001b[39mrevision,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1498\u001b[0m data_files\u001b[38;5;241m=\u001b[39mdata_files,\n\u001b[1;32m 1499\u001b[0m )\n\u001b[1;32m 1501\u001b[0m \u001b[38;5;66;03m# Get dataset builder class from the processing script\u001b[39;00m\n\u001b[0;32m-> 1502\u001b[0m builder_cls \u001b[38;5;241m=\u001b[39m \u001b[43mimport_main_class\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdataset_module\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodule_path\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1503\u001b[0m builder_kwargs \u001b[38;5;241m=\u001b[39m dataset_module\u001b[38;5;241m.\u001b[39mbuilder_kwargs\n\u001b[1;32m 1504\u001b[0m data_files \u001b[38;5;241m=\u001b[39m builder_kwargs\u001b[38;5;241m.\u001b[39mpop(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdata_files\u001b[39m\u001b[38;5;124m\"\u001b[39m, data_files)\n",
43
- "File \u001b[0;32m/opt/conda/envs/hugginface/lib/python3.8/site-packages/datasets/load.py:115\u001b[0m, in \u001b[0;36mimport_main_class\u001b[0;34m(module_path, dataset)\u001b[0m\n\u001b[1;32m 110\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mimport_main_class\u001b[39m(module_path, dataset\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Optional[Union[Type[DatasetBuilder], Type[Metric]]]:\n\u001b[1;32m 111\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Import a module at module_path and return its main class:\u001b[39;00m\n\u001b[1;32m 112\u001b[0m \u001b[38;5;124;03m - a DatasetBuilder if dataset is True\u001b[39;00m\n\u001b[1;32m 113\u001b[0m \u001b[38;5;124;03m - a Metric if dataset is False\u001b[39;00m\n\u001b[1;32m 114\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 115\u001b[0m module \u001b[38;5;241m=\u001b[39m \u001b[43mimportlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodule_path\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 117\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m dataset:\n\u001b[1;32m 118\u001b[0m main_cls_type \u001b[38;5;241m=\u001b[39m DatasetBuilder\n",
44
- "File \u001b[0;32m/opt/conda/envs/hugginface/lib/python3.8/importlib/__init__.py:127\u001b[0m, in \u001b[0;36mimport_module\u001b[0;34m(name, package)\u001b[0m\n\u001b[1;32m 125\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[1;32m 126\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m--> 127\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n",
45
- "File \u001b[0;32m<frozen importlib._bootstrap>:1014\u001b[0m, in \u001b[0;36m_gcd_import\u001b[0;34m(name, package, level)\u001b[0m\n",
46
- "File \u001b[0;32m<frozen importlib._bootstrap>:991\u001b[0m, in \u001b[0;36m_find_and_load\u001b[0;34m(name, import_)\u001b[0m\n",
47
- "File \u001b[0;32m<frozen importlib._bootstrap>:975\u001b[0m, in \u001b[0;36m_find_and_load_unlocked\u001b[0;34m(name, import_)\u001b[0m\n",
48
- "File \u001b[0;32m<frozen importlib._bootstrap>:671\u001b[0m, in \u001b[0;36m_load_unlocked\u001b[0;34m(spec)\u001b[0m\n",
49
- "File \u001b[0;32m<frozen importlib._bootstrap_external>:843\u001b[0m, in \u001b[0;36mexec_module\u001b[0;34m(self, module)\u001b[0m\n",
50
- "File \u001b[0;32m<frozen importlib._bootstrap>:219\u001b[0m, in \u001b[0;36m_call_with_frames_removed\u001b[0;34m(f, *args, **kwds)\u001b[0m\n",
51
- "File \u001b[0;32m~/.cache/huggingface/modules/datasets_modules/datasets/will33am--AVA/eab4630672ca580bfd7382f451336e260b15853dad5817cdd0432f241d1aa41f/AVA.py:15\u001b[0m\n\u001b[1;32m 11\u001b[0m _DESCRIPTION \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 12\u001b[0m _DATA_URL \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 13\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtrain\u001b[39m\u001b[38;5;124m\"\u001b[39m: [_BASE_HF_URL\u001b[38;5;241m/\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mimages.tar.gz\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[1;32m 14\u001b[0m }\n\u001b[0;32m---> 15\u001b[0m DICT_METADATA \u001b[38;5;241m=\u001b[39m \u001b[43mjoblib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload\u001b[49m\u001b[43m(\u001b[49m\u001b[43m_BASE_HF_URL\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m/\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmetadata.pkl\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 18\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mAVA\u001b[39;00m(datasets\u001b[38;5;241m.\u001b[39mGeneratorBasedBuilder):\n\u001b[1;32m 19\u001b[0m VERSION \u001b[38;5;241m=\u001b[39m datasets\u001b[38;5;241m.\u001b[39mVersion(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m1.0.0\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
52
- "File \u001b[0;32m/opt/conda/envs/hugginface/lib/python3.8/site-packages/joblib/numpy_pickle.py:650\u001b[0m, in \u001b[0;36mload\u001b[0;34m(filename, mmap_mode)\u001b[0m\n\u001b[1;32m 648\u001b[0m obj \u001b[38;5;241m=\u001b[39m _unpickle(fobj)\n\u001b[1;32m 649\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 650\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28;43mopen\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mrb\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m \u001b[38;5;28;01mas\u001b[39;00m f:\n\u001b[1;32m 651\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m _read_fileobject(f, filename, mmap_mode) \u001b[38;5;28;01mas\u001b[39;00m fobj:\n\u001b[1;32m 652\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(fobj, \u001b[38;5;28mstr\u001b[39m):\n\u001b[1;32m 653\u001b[0m \u001b[38;5;66;03m# if the returned file object is a string, this means we\u001b[39;00m\n\u001b[1;32m 654\u001b[0m \u001b[38;5;66;03m# try to load a pickle file generated with an version of\u001b[39;00m\n\u001b[1;32m 655\u001b[0m \u001b[38;5;66;03m# Joblib so we load it with joblib compatibility function.\u001b[39;00m\n",
53
- "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: 'data/metadata.pkl'"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  ]
55
  }
56
  ],
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 3,
6
  "id": "aef315bf",
7
  "metadata": {},
8
  "outputs": [],
 
12
  },
13
  {
14
  "cell_type": "code",
15
+ "execution_count": 4,
16
  "id": "c0ed6498",
17
  "metadata": {},
18
  "outputs": [
19
  {
20
  "data": {
21
  "application/vnd.jupyter.widget-view+json": {
22
+ "model_id": "2dc0ecb6d071440fbb63d9eb37239d51",
23
  "version_major": 2,
24
  "version_minor": 0
25
  },
26
  "text/plain": [
27
+ "Downloading builder script: 0%| | 0.00/2.12k [00:00<?, ?B/s]"
28
  ]
29
  },
30
  "metadata": {},
31
  "output_type": "display_data"
32
  },
33
  {
34
+ "name": "stdout",
35
+ "output_type": "stream",
36
+ "text": [
37
+ "Downloading and preparing dataset ava/default to /home/william/.cache/huggingface/datasets/will33am___ava/default/1.0.0/e6b9e5062c6da3936a91aa998767b2df2e4743203754168806d3df6c592a5951...\n"
38
+ ]
39
+ },
40
+ {
41
+ "data": {
42
+ "application/vnd.jupyter.widget-view+json": {
43
+ "model_id": "17de80e07195435abc87fa6d687bf641",
44
+ "version_major": 2,
45
+ "version_minor": 0
46
+ },
47
+ "text/plain": [
48
+ "Downloading data files: 0%| | 0/1 [00:00<?, ?it/s]"
49
+ ]
50
+ },
51
+ "metadata": {},
52
+ "output_type": "display_data"
53
+ },
54
+ {
55
+ "data": {
56
+ "application/vnd.jupyter.widget-view+json": {
57
+ "model_id": "6bc514a4c642434fab8a1728b6dc4ce9",
58
+ "version_major": 2,
59
+ "version_minor": 0
60
+ },
61
+ "text/plain": [
62
+ "Downloading data: 0%| | 0.00/33.2G [00:00<?, ?B/s]"
63
+ ]
64
+ },
65
+ "metadata": {},
66
+ "output_type": "display_data"
67
+ },
68
+ {
69
+ "name": "stderr",
70
+ "output_type": "stream",
71
+ "text": [
72
+ "\n",
73
+ "KeyboardInterrupt\n",
74
+ "\n"
75
  ]
76
  }
77
  ],