maldv committed on
Commit f43ee22
1 Parent(s): cfbf96d

added books

Files changed (2)
  1. cyberpunk.parquet +2 -2
  2. epub-processing.ipynb +157 -9
cyberpunk.parquet CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5e2ece676a87abd0bdda7109f1e109abad8d76e89ad0b4fc48a1ed00ecb1c1ae
- size 11751919
+ oid sha256:f5958da11650c2ed4aa29583b4daa13c3399b33c042f7829e9694afa9515d65b
+ size 39566245
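The parquet file is stored via Git LFS, so only the pointer changes here; the underlying data grows from roughly 11.7 MB to 39.6 MB. As an illustration (not part of the commit), the updated file can be read back with pandas once the LFS object is pulled; the row count and column names below are taken from the notebook diff that follows:

import pandas as pd

# Assumes the real parquet has been fetched (e.g. `git lfs pull`) and a
# parquet engine (pyarrow or fastparquet) is installed.
df = pd.read_parquet("cyberpunk.parquet")
print(len(df))            # 276984 paragraphs after this commit
print(list(df.columns))   # ['book_name', 'paragraph_ix', 'chapter_ix',
                          #  'chapter_title', 'text', 'char_count', 'cumsum_char_count']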
epub-processing.ipynb CHANGED
@@ -30,7 +30,7 @@
  " noext = os.path.splitext(basename)[0]\n",
  " chapter_idx = 0\n",
  " paragraph_idx = 0\n",
- " cumsum_word_count = 0\n",
+ " cumsum_char_count = 0\n",
  " for item in book.get_items_of_type(ebooklib.ITEM_DOCUMENT):\n",
  " content = item.get_content().decode('utf-8')\n",
  " results = list(html_tokenizer(content, try_chapter))\n",
@@ -41,9 +41,9 @@
  " if len(row[1]) == 0:\n",
  " continue\n",
  " paragraph_idx += 1\n",
- " word_count = len((row[1]))\n",
- " cumsum_word_count += word_count\n",
- " row = [noext, paragraph_idx, chapter_idx] + list(row[:]) + [word_count, cumsum_word_count]\n",
+ " char_count = len((row[1]))\n",
+ " cumsum_char_count += char_count\n",
+ " row = [noext, paragraph_idx, chapter_idx] + list(row[:]) + [char_count, cumsum_char_count]\n",
  " yield tuple(row)\n",
  "\n",
  "def html_tokenizer(html_content: str, try_chapter) -> Generator[tuple, None, None]:\n",
@@ -125,8 +125,8 @@
  "name": "stdout",
  "output_type": "stream",
  "text": [
- "Processing allsystemsred\n",
- "Processing alteredcarbon\n"
+ "Processing 1984\n",
+ "Processing 2xs\n"
  ]
  },
  {
@@ -141,32 +141,114 @@
  "name": "stdout",
  "output_type": "stream",
  "text": [
+ "Processing agency\n",
+ "Processing all_these_worlds\n",
+ "Processing all_tomorrows_parties\n",
+ "Processing allsystemsred\n",
+ "Processing alteredcarbon\n",
  "Processing artificialcondition\n",
+ "Processing beyond_the_pale\n",
+ "Processing black_madonna\n",
+ "Processing blood_sport\n",
+ "Processing brave_new_world\n",
+ "Processing burning_bright\n",
  "Processing burningchrome\n",
  "Processing canticleforleibowitz\n",
+ "Processing changeling\n",
+ "Processing choose_your_enemies_carefully\n",
+ "Processing clockwork_asylum\n",
  "Processing cryptonomicon\n",
  "Clipping (1, 2)\n",
+ "Processing dead_air\n",
+ "Processing distraction\n",
  "Processing exitstrategy\n",
+ "Processing fade_to_black\n",
+ "Processing find_your_own_truth\n",
  "Processing forwearemany\n",
  "Processing fugutuvetelemetry\n",
+ "Processing ghost_in_the_wires\n",
+ "Processing globalhead\n",
  "Processing gnomon\n",
+ "Processing headhunters\n",
+ "Processing heavens_river\n",
+ "Processing heavy_weather\n",
+ "Processing holy_fire\n",
+ "Processing house_of_the_sun\n",
+ "Processing i_robot\n",
+ "Processing idoru\n",
+ "Processing into_the_shadows\n",
+ "Processing islands_in_the_net\n",
+ "Processing johnny_mnemonic\n",
+ "Processing just_compensation\n",
+ "Processing lone_wolf\n",
+ "Processing mirrorshades\n",
  "Processing monalisa\n",
  "Clipping (2969, 2984)\n",
  "Processing networkeffect\n",
  "Processing neuromancer\n",
  "Clipping (1, 49)\n",
+ "Processing never_deal_with_a_dragon\n",
+ "Processing never_trust_an_elf\n",
+ "Processing nights_pawn\n",
+ "Processing nosferatu\n",
+ "Processing pattern_recognition\n",
+ "Processing preying_for_keeps\n",
  "Processing quicksilver\n",
  "Processing readyplayerone\n",
+ "Processing reamde\n",
+ "Processing red_storm_rising\n",
  "Processing rogueprotocol\n",
+ "Processing schismatrix_plus\n",
+ "Processing seveneves\n",
+ "Processing shadowboxer\n",
+ "Processing shadowplay\n",
  "Processing snowcrash\n",
  "Clipping (1, 80)\n",
  "Clipping (4590, 4636)\n",
+ "Processing spook_country\n",
+ "Processing steel_rain\n",
+ "Processing stranger_in_a_strange_land\n",
+ "Processing stranger_souls\n",
+ "Processing streets_of_blood\n",
+ "Processing striper_assassin\n",
  "Processing systemcollapse\n",
  "Processing systemofworld\n",
  "Clipping (261, 331)\n",
+ "Processing technobabel\n",
+ "Processing termination_shock\n",
+ "Processing the_art_of_deception\n",
+ "Processing the_art_of_intrusion\n",
+ "Processing the_demolished_man\n",
+ "Processing the_diamond_age\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/mnt/biggy/ai/notebook/jupyterenv/lib/python3.10/site-packages/ebooklib/epub.py:1395: UserWarning: In the future version we will turn default option ignore_ncx to True.\n",
+ " warnings.warn('In the future version we will turn default option ignore_ncx to True.')\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Processing the_lucifer_deck\n",
+ "Processing the_paradise_factory\n",
+ "Processing the_peripheral\n",
+ "Processing the_stars_my_destination\n",
+ "Processing the_zenith_angle\n",
  "Processing theconfusion\n",
  "Processing themoonisaharshmistress\n",
- "Saved 72413 paragraphs to cyberpunk.parquet\n"
+ "Processing this_automatic_eden\n",
+ "Processing virtual_light\n",
+ "Processing we_are_legion\n",
+ "Processing who_hunts_the_hunter\n",
+ "Processing wolf_raven\n",
+ "Processing worlds_without_end\n",
+ "Processing zero_history\n",
+ "Saved 276984 paragraphs to cyberpunk.parquet\n"
  ]
  }
  ],
@@ -198,9 +280,75 @@
  " 'gnomon': {'drop': [1,2,3,4,5,34,35,36]},\n",
  " 'themoonisaharshmistress': {'drop': [1,2,3,4]},\n",
  " 'cantileforleibowitz': {'drop': [1,2,3,4,37,38]},\n",
+ " '1984': {'drop': [1,2,3,4,5]},\n",
+ " '2xs': {'drop': [1,2,32], 'try_chapter': True},\n",
+ " 'agency': {'drop': [1,112,113,114,115,116]},\n",
+ " 'all_these_worlds': {'drop': [1,2,3,4,5,6,7], 'try_chapter': True},\n",
+ " 'all_tomorrows_parties': {'drop': [1,2,3,4]},\n",
+ " 'beyond_the_pale': {'drop': [1,2,3,4,5,59,60,61], 'try_chapter': True},\n",
+ " 'black_madonna': {'drop': [1,2,33], 'try_chapter': True},\n",
+ " 'blood_sport': {'drop': [1,2,3,4,33,34], 'try_chapter': True},\n",
+ " 'brave_new_world': {'drop': [1,2,3], 'try_chapter': True},\n",
+ " 'burning_bright': {'drop': [1], 'try_chapter': True},\n",
+ " 'changeling': {},\n",
+ " 'choose_your_enemies_carefully': {'drop': [1,2,3,4,51,52], 'try_chapter': True},\n",
+ " 'clockwork_asylum': {'drop': [1,2,3,50,51], 'try_chapter': True},\n",
+ " 'dead_air': {'drop': [1,2,3,4,5,6,68,69,70,71], 'try_chapter': True},\n",
+ " 'distraction': {'drop': [1,2,3,4,5,18]},\n",
+ " 'fade_to_black': {'drop': [45, 46]},\n",
+ " 'find_your_own_truth': {'drop': [1,2,3,4,40,41], 'try_chapter': True},\n",
+ " 'ghost_in_the_wires': {'drop': [1,2,3,4,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68]},\n",
+ " 'globalhead': {'drop': [1,2,3,4,18,19,20]},\n",
+ " 'headhunters': {'drop': [1,2,3,62,63,64], 'try_chapter': True},\n",
+ " 'heavens_river': {'drop': [1,2,3,4,5,6,7,8,78,79,80], 'try_chapter': True},\n",
+ " 'heavy_weather': {'drop': [1,2,3,4,15]},\n",
+ " 'holy_fire': {'drop': [1,2,3,4,11]},\n",
+ " 'house_of_the_sun': {'drop': [1,2,3,4,5,33], 'try_chapter': True},\n",
+ " 'i_robot': {'drop': [1,2,13,14,15]},\n",
+ " 'idoru': {'drop': [49,50,51,52]},\n",
+ " 'into_the_shadows': {'drop': [1,2,3,4,15,16,17,18], 'try_chapter': True},\n",
+ " 'islands_in_the_net': {'drop': [1,13,14,15,16], 'try_chapter': True},\n",
+ " 'johnny_mnemonic': {'try_chapter': True},\n",
+ " 'just_compensation': {'drop': [1,2,3,4,56,57], 'try_chapter': True},\n",
+ " 'lone_wolf': {'drop': [1]},\n",
+ " 'mirrorshades': {'drop': [14,15,16,17,18,19,20]},\n",
+ " 'never_deal_with_a_dragon': {'drop': [60,61]},\n",
+ " 'never_trust_an_elf': {'drop': [1,2,3,4,38,39], 'try_chapter': True},\n",
+ " 'nights_pawn': {'drop': [1], 'try_chapter': True},\n",
+ " 'nosferatu': {'drop': [1,2,3,34], 'try_chapter': True},\n",
+ " 'pattern_recognition': {'drop': [43], 'try_chapter': True},\n",
+ " 'preying_for_keeps': {'drop': [1,2,3,4,5,44,45], 'try_chapter': True},\n",
+ " 'reamde': {'drop': [1,2,3,4,5,28,29,30,31,32,33]},\n",
+ " 'schismatrix_plus': {'drop': [1,2,3,4]},\n",
+ " 'seveneves': {'drop': [1,2,3,4,23]},\n",
+ " 'shadowboxer': {'drop': [1,2,3,4,5,43,44], 'try_chapter': True},\n",
+ " 'shadowplay': {'drop': [1,2,3,4,43], 'try_chapter': True},\n",
+ " 'spook_country': {'drop': [1,2,3,4,5,6,7,8,93,94]},\n",
+ " 'steel_rain': {'drop': [1,2,3,45,46], 'try_chapter': True},\n",
+ " 'stranger_in_a_strange_land': {'drop': [1,2,3]},\n",
+ " 'stranger_souls': {'drop': [1,2,3,61,62], 'try_chapter': True},\n",
+ " 'streets_of_blood': {'drop': [1,2,3,38], 'try_chapter': True},\n",
+ " 'striper_assassin': {},\n",
+ " 'technobabel': {'drop': [1,2,3,4,5,37,38], 'try_chapter': True},\n",
+ " 'termination_shock': {'drop': [1,2,3,55], 'try_chapter': True},\n",
+ " 'the_art_of_deception': {'drop': [1,2,3,4,5,6,7,8,9,10,11,28,29,30,31], 'try_chapter': True},\n",
+ " 'the_demolished_man': {'drop': [1,2,3,4,5,6], 'try_chapter': True},\n",
+ " 'the_diamond_age': {'drop': [1,2,3,10,11,12,13]},\n",
+ " 'the_lucifer_deck': {'drop': [1,2,3,4,36,37]},\n",
+ " 'the_paradise_factory': {'drop': [1]},\n",
+ " 'the_peripheral': {'drop': [1,2,3,4,129,130], 'try_chapter': True},\n",
+ " 'the_stars_my_destination': {'drop': [1,2], 'try_chapter': True},\n",
+ " 'the_zenith_angle': {'drop': [1,2,3], 'try_chapter': True},\n",
+ " 'this_automatic_eden': {'drop': [1,69,70]},\n",
+ " 'virtual_light': {'drop': [1,2,3,4,44,45]},\n",
+ " 'we_are_legion': {'drop': [1,2,3,4]},\n",
+ " 'who_hunts_the_hunter': {'drop': [1,2,3,4,5,93,94], 'try_chapter': True},\n",
+ " 'wolf_raven': {'drop': [1]},\n",
+ " 'worlds_without_end': {'drop': [1,2,3,4,5,34,35,36,37]},\n",
+ " 'zero_history': {'drop': [1,2,90,91]},\n",
  "}\n",
  "\n",
- "all_books = pd.DataFrame([], columns=['book_name', 'paragraph_ix', 'chapter_ix', 'chapter_title', 'text', 'word_count', 'cumsum_word_count'])\n",
+ "all_books = pd.DataFrame([], columns=['book_name', 'paragraph_ix', 'chapter_ix', 'chapter_title', 'text', 'char_count', 'cumsum_char_count'])\n",
  "for book in sorted(glob('source/*.epub')):\n",
  " name = os.path.splitext(os.path.basename(book))[0]\n",
  " print(f\"Processing {name}\")\n",
@@ -216,7 +364,7 @@
  " drops = config['drop']\n",
  "\n",
  " lines = parse_ebook_html(book, try_chapter=try_chapter)\n",
- " new_frame = pd.DataFrame(lines, columns=['book_name', 'paragraph_ix', 'chapter_ix', 'chapter_title', 'text', 'word_count', 'cumsum_word_count'])\n",
+ " new_frame = pd.DataFrame(lines, columns=['book_name', 'paragraph_ix', 'chapter_ix', 'chapter_title', 'text', 'char_count', 'cumsum_char_count'])\n",
  " for drop in drops:\n",
  " new_frame = new_frame[new_frame['chapter_ix'] != drop]\n",
  " for clip in clips:\n",