loubnabnl (HF staff) committed
Commit 5583322
1 Parent(s): de3668d

Update github-code-clean.py

Files changed (1):
  1. github-code-clean.py +6 -15
github-code-clean.py CHANGED

@@ -20,8 +20,6 @@ import pyarrow as pa
 import pyarrow.parquet as pq
 
 import datasets
-from huggingface_hub import HfApi, HfFolder
-from datasets.data_files import DataFilesDict
 
 _REPO_NAME = "codeparrot/github-code-clean"
 
@@ -159,19 +157,12 @@ class GithubCode(datasets.GeneratorBasedBuilder):
         )
 
     def _split_generators(self, dl_manager):
-
-        hfh_dataset_info = HfApi(datasets.config.HF_ENDPOINT).dataset_info(
-            _REPO_NAME,
-            timeout=100.0,
-        )
-
-        patterns = datasets.data_files.get_patterns_in_dataset_repository(hfh_dataset_info)
-        data_files = datasets.data_files.DataFilesDict.from_hf_repo(
-            patterns,
-            dataset_info=hfh_dataset_info,
-        )
-
-        files = dl_manager.download_and_extract(data_files["train"])
+        num_shards = 1126
+        data_files = [
+            f"data/train-{_index:05d}-of-{num_shards:05d}.parquet"
+            for _index in range(num_shards)
+        ]
+        files = dl_manager.download(data_files)
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
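
The rewritten `_split_generators` enumerates the 1126 parquet shards by their `data/train-XXXXX-of-01126.parquet` names instead of discovering them through the Hub API, which is why the `huggingface_hub` and `datasets.data_files` imports are dropped in the first hunk. Below is a minimal sketch of how such a shard list could be consumed once `dl_manager.download` has fetched the files; the `read_shards` helper is purely illustrative and not the script's actual `_generate_examples` implementation, it only assumes the `pyarrow.parquet` import already present in the script.

```python
import pyarrow.parquet as pq

# Same shard-naming scheme as the new _split_generators.
num_shards = 1126
data_files = [
    f"data/train-{_index:05d}-of-{num_shards:05d}.parquet"
    for _index in range(num_shards)
]
# data_files[0]  -> "data/train-00000-of-01126.parquet"
# data_files[-1] -> "data/train-01125-of-01126.parquet"


def read_shards(files):
    # Illustrative reader: iterate over locally downloaded shards and
    # yield one (key, example) pair per parquet row.
    key = 0
    for path in files:
        table = pq.read_table(path)
        for row in table.to_pylist():
            yield key, row
            key += 1
```

Listing the shards by name avoids the extra `HfApi.dataset_info` round trip and the `datasets.data_files` pattern-resolution step, at the cost of having to update `num_shards` if the number of parquet files in the repository ever changes.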