Refactor loading script #6
by albertvillanova (HF staff)
- samanantar.py +9 -17
samanantar.py CHANGED

@@ -14,9 +14,7 @@
 # limitations under the License.
 """Samanantar dataset."""
 
-import
-
-import pandas as pd
+from pathlib import Path
 
 import datasets
 
@@ -78,25 +76,19 @@ class Samanantar(datasets.GeneratorBasedBuilder):
 
     def _split_generators(self, dl_manager):
         urls = _URLS[str(self.config.version)]
-
+        data_dir = dl_manager.download_and_extract(urls)
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
                 gen_kwargs={
-                    "
+                    "data_dir": (Path(data_dir) / "v2" / f"en-{self.config.language}"),
                 },
             ),
         ]
 
-    def _generate_examples(self,
-
-
-
-
-
-
-            for line_src, line_tgt in zip(en, config_language):
-                yield id_, {"idx": id_ ,"src": line_src.strip(), "tgt": line_tgt.strip()}
-                id_ += 1
-
-            break # to prevent it from getting repeated examples (it seems the dataset gets repeated in en-te folder)
+    def _generate_examples(self, data_dir):
+        src_path = data_dir / "train.en"
+        tgt_path = data_dir / f"train.{self.config.language}"
+        with src_path.open(encoding="utf-8") as src_file, tgt_path.open(encoding="utf-8") as tgt_file:
+            for idx, (src_line, tgt_line) in enumerate(zip(src_file, tgt_file)):
+                yield idx, {"idx": idx, "src": src_line.strip(), "tgt": tgt_line.strip()}
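
For context, a minimal usage sketch of the refactored loader (not part of the PR). The repository id "ai4bharat/samanantar" and the "te" config name are assumptions; the en-te folder mentioned in the removed comment suggests Telugu is one of the language configs, but substitute the actual repo id and language code as needed.

    from datasets import load_dataset

    # Loads the TRAIN split produced by _generate_examples above; each example
    # has "idx", "src" (English sentence) and "tgt" (target-language sentence).
    # "ai4bharat/samanantar" and "te" are assumed names, not confirmed by the PR.
    ds = load_dataset("ai4bharat/samanantar", "te", split="train")
    print(ds[0])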