Pclanglais committed on
Commit
adf682d
1 Parent(s): f00d616

Upload oa_load_async.py

Files changed (1)
  1. oa_load_async.py +143 -0
oa_load_async.py ADDED
@@ -0,0 +1,143 @@
+ import asyncio
+ import os
+ import random
+ from collections import deque
+ import ssl
+ from urllib.parse import urlparse
+
+ import aiohttp
+ import polars as pl
+ import aiofiles  # only needed if the commented-out async write path below is re-enabled
+ from aiohttp import TCPConnector
+
+ # from limiter import Limiter
+ import certifi
+
+ from tqdm.asyncio import tqdm_asyncio
+
+ # limit_downloads = Limiter(rate=20, capacity=1000, consume=1)
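+ # With the Limiter disabled, concurrency is bounded only by the TCPConnector
+ # limit on each session. A stdlib alternative (a sketch, not part of this
+ # script) would be a module-level semaphore acquired around each request:
+ #     download_semaphore = asyncio.Semaphore(20)
+ #     async with download_semaphore:
+ #         async with session.get(...) as response: ...
+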
+ BATCH_SIZE = 100
+ BASE_DOWNLOAD_PATH = "/mnt/jupiter/openalex_extraction/openalex_2"
+ TIME_OUT = 60
+ USER_AGENTS = [
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
+     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Firefox/89.0",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:89.0) Gecko/20100101 Firefox/89.0",
+ ]
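+
+ # Rotating a desktop User-Agent and sending the target URL as Referer makes
+ # bulk PDF fetches look like ordinary browser traffic; some publisher hosts
+ # refuse requests carrying the default aiohttp User-Agent.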
+
+
+ def make_header(referer: str) -> dict:
+     """Build request headers with a random User-Agent and the target as Referer."""
+     return {
+         "User-Agent": random.choice(USER_AGENTS),
+         "Referer": referer,
+     }
+
+
+ async def get_request(session, queries: list, uuid: str) -> tuple[str, bytes | None]:
+     """Try each candidate URL in turn; return (target filename, PDF bytes)."""
+     # Reduce the OpenAlex identifier URL to a bare, filesystem-safe ID
+     uuid = urlparse(uuid).path.replace("/", "")
+
+     for query in queries:
+         try:
+             async with session.get(url=query, headers=make_header(query)) as response:
+                 response.raise_for_status()  # treat HTTP errors as a failed query
+                 content = await response.read()
+                 filename = os.path.join(BASE_DOWNLOAD_PATH, f"{uuid}.pdf")
+                 file_number = 1
+                 while os.path.exists(filename):
+                     filename = os.path.join(
+                         BASE_DOWNLOAD_PATH, f"{uuid}_{file_number}.pdf"
+                     )
+                     file_number += 1
+                 # async with aiofiles.open(filename, "wb") as f:
+                 #     await f.write(content)
+                 return filename, content
+         except Exception as e:
+             print(f"An error occurred with query {query}: {e}")
+             continue  # try the next candidate URL
+
+     # Every candidate URL failed for this record
+     print(f"All queries failed: {queries}")
+     return "error", None
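+ # Hypothetical example: for uuid "https://openalex.org/W123", a successful fetch
+ # returns ("/mnt/jupiter/openalex_extraction/openalex_2/W123.pdf", b"%PDF-...").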
+
+
+ async def get_batched(session, batch):
+     """Schedule one download per (candidate URLs, identifier) pair and gather the results."""
+     tasks = []
+     for q in batch:
+         if q:
+             task = asyncio.ensure_future(get_request(session, queries=q[0], uuid=q[1]))
+             tasks.append(task)
+     return await tqdm_asyncio.gather(
+         *tasks, desc="Collecting batch", leave=True, position=0
+     )
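+ # tqdm_asyncio.gather preserves input order (like asyncio.gather), so each
+ # (filename, content) result lines up with the batch entry that produced it.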
+
+
+ async def main(file_loc):
+     # or df = datasets.load_dataset(url, split="train").to_polars().lazy()
+     df = pl.scan_parquet(file_loc)
+
+     # Count rows without materialising the whole frame
+     total_rows = df.select(pl.len()).collect().item()
+
+     # Number of rows to process, from the start index to the end
+     # (raise start_index to resume an interrupted run)
+     start_index = 0
+     num_rows = total_rows - start_index
+
+     df = df.slice(start_index, num_rows)
+     df = (
+         df.with_columns(pl.col("pdf_url").str.split(","))
+         .select(["identifier", "pdf_url"])
+         .collect(streaming=True)
+         .iter_rows(named=True)
+     )
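+     # Each row now carries a list of candidate URLs, e.g. (hypothetical values;
+     # pdf_url is comma-separated in the source parquet):
+     #     {"identifier": "https://openalex.org/W123",
+     #      "pdf_url": ["https://a.org/x.pdf", "https://b.org/x.pdf"]}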
+
+     batches = deque()
+     output = []
+
+     # Shared TLS context for every session; certifi supplies the CA bundle
+     ssl_context = ssl.create_default_context(cafile=certifi.where())
+     ssl_context.check_hostname = True
+     ssl_context.verify_mode = ssl.CERT_REQUIRED
+
+     timeout = aiohttp.ClientTimeout(total=TIME_OUT)
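+     # ClientTimeout(total=...) caps each whole request (connect, send, read)
+     # at 60 seconds, rather than timing out individual socket reads.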
+
+     for row in df:
+         batches.append((row["pdf_url"], row["identifier"]))
+         if len(batches) == BATCH_SIZE:
+             async with aiohttp.ClientSession(
+                 connector=TCPConnector(ssl=ssl_context, limit=50),
+                 timeout=timeout,
+             ) as session:
+                 responses = await get_batched(session, batches)
+                 for filename, content in responses:
+                     if content:
+                         with open(filename, "wb") as f:
+                             f.write(content)
+                         output.append(filename)
+             batches.clear()
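+     # Opening a fresh ClientSession per batch gives up connection reuse across
+     # batches, but keeps stuck connections from outliving the batch that opened them.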
+
+     # Flush the final partial batch (fewer than BATCH_SIZE entries)
+     if batches:
+         async with aiohttp.ClientSession(
+             connector=TCPConnector(ssl=ssl_context, limit=50), timeout=timeout
+         ) as session:
+             responses = await get_batched(session, batches)
+             print("Saving Batch")
+             for filename, content in responses:
+                 if content:
+                     with open(filename, "wb") as f:
+                         f.write(content)
+                     output.append(filename)
+             print("Batch Saved")
+
+     return output
+
+
+ if __name__ == "__main__":
+     FILE_LOCATION = "/mnt/jupiter/openalex_extraction/openalex_extraction_2.parquet"
+     results = asyncio.run(main(FILE_LOCATION))
+     print(results)