Dataset: wiki_lingua

Multilinguality: multilingual
Size Categories: 10K<n<100K, 1K<n<10K
Language Creators: crowdsourced
Annotations Creators: crowdsourced
Source Datasets: original
system (HF staff) committed

Commit 15cec6a (parent: 7f98913)

Update files from the datasets library (from 1.18.0)


Release notes: https://github.com/huggingface/datasets/releases/tag/1.18.0
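This commit was generated by the 1.18.0 release tooling. A quick way to confirm that a local environment matches the release the files were generated from (assuming the `datasets` package is installed):

```python
import datasets

# The commit was produced against datasets 1.18.0; check the local install.
print(datasets.__version__)  # e.g. "1.18.0"
```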

Files changed (2):
  1. README.md (+1 -0)
  2. create_dummy.py (+7 -6)
README.md CHANGED

@@ -88,6 +88,7 @@ task_categories:
 task_ids:
 - summarization
 paperswithcode_id: wikilingua
+pretty_name: WikiLingua
 ---
 # Dataset Card for "wiki_lingua"
 
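The only change here is the new `pretty_name` tag, which sets the display name shown on the Hub; it does not affect how the dataset is loaded. A minimal usage sketch, assuming the per-language configuration names (e.g. `english`) that wiki_lingua exposes:

```python
from datasets import load_dataset

# "english" is assumed here as one of WikiLingua's per-language
# configurations; the full list appears on the dataset page.
dataset = load_dataset("wiki_lingua", "english")
print(dataset["train"][0])
```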
create_dummy.py CHANGED

@@ -1,4 +1,5 @@
 import itertools
+import logging
 import os
 import pickle
 import shutil
@@ -46,7 +47,7 @@ def create():
     base_path = "/Users/katnoria/dev/projects/workspaces/python/datasets"
     for key in _URLs.keys():
         # data = load_dataset('./datasets/wiki_lingua', key)
-        print(f"Finding {key}.pkl")
+        logging.info(f"Finding {key}.pkl")
         filepath = [name for name in files if name.endswith(f"{key}.pkl")][0]
         with open(filepath, "rb") as f:
             data = pickle.load(f)
@@ -55,13 +56,13 @@ def create():
         fname = sanitize_url(_URLs[key])
         dirname = pjoin(base_path, f"datasets/wiki_lingua/dummy/{key}/1.1.0/dummy_data")
         if not os.path.exists(dirname):
-            print(f"created folder {dirname}")
+            logging.info(f"created folder {dirname}")
             os.makedirs(dirname)
         fname = pjoin(dirname, fname)
-        print(f"creating for {key}:{fname}")
+        logging.info(f"creating for {key}:{fname}")
         with open(fname, "wb") as f:
             pickle.dump(data_subset, f)
-        print("SUCCESS")
+        logging.info("SUCCESS")


 def zip():
@@ -70,10 +71,10 @@ def zip():
     for key in _URLs.keys():
         # dirname = pjoin(base_path, f"datasets/wiki_lingua/dummy/{key}/1.1.0/dummy_data")
         dirname = pjoin(base_path, f"datasets/wiki_lingua/dummy/{key}/1.1.0")
-        print(f"Zipping {dirname}")
+        logging.info(f"Zipping {dirname}")
         shutil.make_archive(f"{dirname}/dummy_data", "zip", dirname, "dummy_data")
         shutil.rmtree(f"{dirname}/dummy_data")
-        print(f"Deleted folder {dirname}/dummy_data")
+        logging.info(f"Deleted folder {dirname}/dummy_data")


 # Utility script to create the dummy data and zip the contents
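One practical note on the print-to-logging switch: `logging.info` emits nothing under the root logger's default WARNING level, so the new messages only appear once logging is configured. A minimal sketch of how the utility might be driven, assuming the `create()` and `zip()` functions defined above:

```python
import logging

# Without this, the logging.info calls above are silently dropped
# (the root logger defaults to the WARNING level).
logging.basicConfig(level=logging.INFO, format="%(levelname)s:%(message)s")

# Hypothetical driver: build the dummy pickles, then zip each dummy_data folder.
create()
zip()
```

Passing f-strings to `logging.info`, as the diff does, works but formats eagerly; logging's lazy style, `logging.info("Finding %s.pkl", key)`, defers formatting until the record is actually emitted.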