paniniDot committed on
Commit 72aed7c
1 Parent(s): d686db1

Update sci_lay.py

Files changed (1)
  1. sci_lay.py +38 -38
sci_lay.py CHANGED
@@ -118,46 +118,46 @@ class SciLay(datasets.GeneratorBasedBuilder):
         )
 
     def _split_generators(self, dl_manager):
-        """Returns SplitGenerators."""
-        urls = {
-            split: _URL.format(version=self.config.version, split_name=split_name)
-            for split, split_name in _SPLIT_NAMES.items()
-        }
-        dl_paths = dl_manager.download_and_extract(urls)
-        paths = {
-            split: [
-                dl_manager.iter_files(os.path.join(dl_paths[split], split_name, code)) for code in self.config.journals
-            ]
-            for split, split_name in _SPLIT_NAMES.items()
-        }
-        return [
-            datasets.SplitGenerator(
-                name=split,
-                gen_kwargs={"paths": paths[split]},
-            )
-            for split in _SPLIT_NAMES
+        """Returns SplitGenerators."""
+        urls = {
+            split: _URL.format(version=self.config.version, split_name=split_name)
+            for split, split_name in _SPLIT_NAMES.items()
+        }
+        dl_paths = dl_manager.download_and_extract(urls)
+        paths = {
+            split: [
+                dl_manager.iter_files(os.path.join(dl_paths[split], split_name, code)) for code in self.config.journals
            ]
+            for split, split_name in _SPLIT_NAMES.items()
+        }
+        return [
+            datasets.SplitGenerator(
+                name=split,
+                gen_kwargs={"paths": paths[split]},
+            )
+            for split in _SPLIT_NAMES
+        ]
 
     def _generate_examples(self, paths=None):
-        """Yields examples."""
-        unique_dois = set()  # To keep track of unique DOIs
-        for paths_per_journal in paths:
-            for path in paths_per_journal:
-                with open(path, "rb") as fin:
-                    for row in fin:
-                        json_obj = json.loads(row)
-                        doi = json_obj[_DOI]
-                        if doi not in unique_dois:
-                            unique_dois.add(doi)
-                            example = {
-                                _DOI: doi,
-                                _PMCID: json_obj[_PMCID],
-                                _SUMMARY: json_obj[_SUMMARY],
-                                _ABSTRACT: json_obj[_ABSTRACT],
-                                _JOURNAL: json_obj[_JOURNAL],
-                                _TOPICS: json_obj[_TOPICS],
-                                _KEYWORDS: json_obj[_KEYWORDS]
-                            }
-                            yield doi, example
+        """Yields examples."""
+        unique_dois = set()  # To keep track of unique DOIs
+        for paths_per_journal in paths:
+            for path in paths_per_journal:
+                with open(path, "rb") as fin:
+                    for row in fin:
+                        json_obj = json.loads(row)
+                        doi = json_obj[_DOI]
+                        if doi not in unique_dois:
+                            unique_dois.add(doi)
+                            example = {
+                                _DOI: doi,
+                                _PMCID: json_obj[_PMCID],
+                                _SUMMARY: json_obj[_SUMMARY],
+                                _ABSTRACT: json_obj[_ABSTRACT],
+                                _JOURNAL: json_obj[_JOURNAL],
+                                _TOPICS: json_obj[_TOPICS],
+                                _KEYWORDS: json_obj[_KEYWORDS]
+                            }
+                            yield doi, example
 
 
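
For context, a minimal usage sketch of how a loading script like this is exercised end to end. It is not part of the commit: the repository id "paniniDot/sci_lay" and the column names "doi" and "summary" are assumptions inferred from the committer name and the _DOI/_SUMMARY constants in the script, not confirmed by the diff.

from datasets import load_dataset

# Assumed repository id (inferred from the committer name, not confirmed here).
# load_dataset() invokes _split_generators() to download and extract the
# per-split archives, then _generate_examples() to yield one record per
# unique DOI. Recent versions of the datasets library require
# trust_remote_code=True for script-based datasets such as this one.
ds = load_dataset("paniniDot/sci_lay", split="train", trust_remote_code=True)

# Column names are assumed to follow the _DOI / _SUMMARY constants.
print(ds[0]["doi"])
print(ds[0]["summary"][:200])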