Fraser committed on
Commit
1ba53c1
·
1 Parent(s): 54694dd
Files changed (2) hide show
  1. README.md +7 -0
  2. wiki_sentences.py +3 -1
README.md CHANGED
@@ -5,3 +5,10 @@ A dataset of all the sentences in Wikipedia.
5
  Filtered to only include sentences <=64 characters.
6
 
7
  Taken from the OPTIMUS project. https://github.com/ChunyuanLI/Optimus/blob/master/download_datasets.md
 
 
 
 
 
 
 
 
5
  Filtered to only include sentences <=64 characters.
6
 
7
  Taken from the OPTIMUS project. https://github.com/ChunyuanLI/Optimus/blob/master/download_datasets.md
8
+
9
+ The dataset is 11.8GB, so it is best to load it using streaming:
10
+
11
+ ```python
12
+ from datasets import load_dataset
13
+ dataset = load_dataset("Fraser/wiki_sentences", split='train', streaming=True)
14
+ ```
wiki_sentences.py CHANGED
@@ -39,6 +39,8 @@ class WikiSentences(datasets.GeneratorBasedBuilder):
39
  def _generate_examples(self, filepath):
40
  with open(filepath, encoding="utf-8") as txt_file:
41
  for i, line in enumerate(txt_file):
 
 
42
  line = line.strip()
43
- if len(line) <= 64:
44
  yield i, {"text": line}
 
39
  def _generate_examples(self, filepath):
40
  with open(filepath, encoding="utf-8") as txt_file:
41
  for i, line in enumerate(txt_file):
42
+ import pdb
43
+ pdb.set_trace()
44
  line = line.strip()
45
+ if line and len(line) <= 64:
46
  yield i, {"text": line}