import asyncio
from contextlib import asynccontextmanager
from typing import Optional
from fastapi import APIRouter, FastAPI
import httpx
from pydantic import BaseModel, Field
from playwright.async_api import async_playwright, Browser, BrowserContext, Page
import logging
import uvicorn

from scrap import PatentScrapBulkResponse, scrap_patent_async, scrap_patent_bulk_async
from serp import SerpQuery, SerpResults, query_arxiv, query_bing_search, query_brave_search, query_ddg_search, query_google_patents, query_google_scholar
from utils import log_gathered_exceptions

logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s][%(levelname)s][%(filename)s:%(lineno)d]: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)

# Playwright global context, initialized in the lifespan handler below
playwright = None
pw_browser: Optional[Browser] = None

# Shared httpx client with connection pooling for plain-HTTP scraping
httpx_client = httpx.AsyncClient(timeout=30, limits=httpx.Limits(
    max_connections=30, max_keepalive_connections=20))


@asynccontextmanager
async def api_lifespan(app: FastAPI):
    global playwright, pw_browser
    playwright = await async_playwright().start()
    pw_browser = await playwright.chromium.launch(headless=True)
    yield

    await pw_browser.close()
    await playwright.stop()

with open("docs/docs.md") as f:
    api_description = f.read()

app = FastAPI(lifespan=api_lifespan, docs_url="/",
              title="SERPent", description=api_description)

# Router for scraping-related endpoints
scrap_router = APIRouter(prefix="/scrap", tags=["scraping"])
# Router for SERP-scraping related endpoints
serp_router = APIRouter(prefix="/serp", tags=["serp scraping"])

# ===================== Search endpoints =====================


@serp_router.post("/search_scholar")
async def search_google_scholar(params: SerpQuery) -> SerpResults:
    """Queries Google Scholar for the specified queries."""
    logging.info(f"Searching Google Scholar for queries: {params.queries}")
    results = await asyncio.gather(*[query_google_scholar(pw_browser, q, params.n_results) for q in params.queries], return_exceptions=True)
    log_gathered_exceptions(results, "google scholar search", params)

    # Filter out exceptions and flatten the results
    filtered_results = [r for r in results if not isinstance(r, Exception)]
    flattened_results = [
        item for sublist in filtered_results for item in sublist]

    # all queries failed, return the last exception
    if len(filtered_results) == 0:
        return SerpResults(results=[], error=str(results[-1]))

    return SerpResults(results=flattened_results, error=None)
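
# Example client call against the endpoint above (a hypothetical sketch: it
# assumes the server is running locally on port 7860 and that SerpQuery
# exposes the `queries` and `n_results` fields used by the handlers in this
# file):
#
#   import httpx
#   resp = httpx.post(
#       "http://localhost:7860/serp/search_scholar",
#       json={"queries": ["transformer architectures"], "n_results": 10},
#   )
#   print(resp.json()["results"])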


@serp_router.post("/search_arxiv")
async def search_arxiv(params: SerpQuery) -> SerpResults:
    """Searches arXiv for the specified queries and returns the found documents."""
    logging.info(f"Searching Arxiv for queries: {params.queries}")
    results = await asyncio.gather(*[query_arxiv(httpx_client, q, params.n_results) for q in params.queries], return_exceptions=True)
    log_gathered_exceptions(results, "arxiv search", params)

    filtered_results = [r for r in results if not isinstance(r, Exception)]
    flattened_results = [
        item for sublist in filtered_results for item in sublist]

    if len(filtered_results) == 0:
        return SerpResults(results=[], error=str(results[-1]))

    return SerpResults(results=flattened_results, error=None)


@serp_router.post("/search_patents")
async def search_patents(params: SerpQuery) -> SerpResults:
    """Searches google patents for the specified queries and returns the found documents."""
    logging.info(f"Searching Google Patents for queries: {params.queries}")
    results = await asyncio.gather(*[query_google_patents(pw_browser, q, params.n_results) for q in params.queries], return_exceptions=True)
    log_gathered_exceptions(results, "google patent search", params)

    # Filter out exceptions and flatten the results
    filtered_results = [r for r in results if not isinstance(r, Exception)]
    flattened_results = [
        item for sublist in filtered_results for item in sublist]

    # all queries failed, return the last exception
    if len(filtered_results) == 0:
        return SerpResults(results=[], error=str(results[-1]))

    return SerpResults(results=flattened_results, error=None)


@serp_router.post("/search_brave")
async def search_brave(params: SerpQuery) -> SerpResults:
    """Searches brave search for the specified queries and returns the found documents."""
    logging.info(f"Searching Brave Search for queries: {params.queries}")
    results = await asyncio.gather(*[query_brave_search(pw_browser, q, params.n_results) for q in params.queries], return_exceptions=True)
    log_gathered_exceptions(results, "brave search", params)

    # Filter out exceptions and flatten the results
    filtered_results = [r for r in results if not isinstance(r, Exception)]
    flattened_results = [
        item for sublist in filtered_results for item in sublist]

    # all queries failed, return the last exception
    if len(filtered_results) == 0:
        return SerpResults(results=[], error=str(results[-1]))

    return SerpResults(results=flattened_results, error=None)


@serp_router.post("/search_bing")
async def search_bing(params: SerpQuery) -> SerpResults:
    """Searches Bing search for the specified queries and returns the found documents."""
    logging.info(f"Searching Bing Search for queries: {params.queries}")
    results = await asyncio.gather(*[query_bing_search(pw_browser, q, params.n_results) for q in params.queries], return_exceptions=True)
    log_gathered_exceptions(results, "bing search", params)

    # Filter out exceptions and flatten the results
    filtered_results = [r for r in results if not isinstance(r, Exception)]
    flattened_results = [
        item for sublist in filtered_results for item in sublist]

    # all queries failed, return the last exception
    if len(filtered_results) == 0:
        return SerpResults(results=[], error=str(results[-1]))

    return SerpResults(results=flattened_results, error=None)


@serp_router.post("/search_duck")
async def search_duck(params: SerpQuery) -> SerpResults:
    """Searches duckduckgo for the specified queries and returns the found documents"""
    logging.info(f"Searching DuckDuckGo for queries: {params.queries}")
    results = await asyncio.gather(*[query_ddg_search(q, params.n_results) for q in params.queries], return_exceptions=True)
    log_gathered_exceptions(results, "duckduckgo search", params)

    # Filter out exceptions and flatten the results
    filtered_results = [r for r in results if not isinstance(r, Exception)]
    flattened_results = [
        item for sublist in filtered_results for item in sublist]

    # all queries failed, return the last exception
    if len(filtered_results) == 0:
        return SerpResults(results=[], error=str(results[-1]))

    return SerpResults(results=flattened_results, error=None)


@serp_router.post("/search")
async def search(params: SerpQuery) -> SerpResults:
    """Attempts each query against every backend in turn (DuckDuckGo, then Brave, then Bing), falling back on failure."""
    results = []

    for q in params.queries:
        try:
            logging.info(f"Querying DDG with query: `{q}`")
            res = await query_ddg_search(q, params.n_results)
            results.extend(res)
            continue
        except Exception as e:
            logging.error(f"Failed to query DDG with query `{q}`: {e}")
            logging.info("Trying with next browser backend.")

        try:
            logging.info(f"Querying Brave Search with query: `{q}`")
            res = await query_brave_search(pw_browser, q, params.n_results)
            results.extend(res)
            continue
        except Exception as e:
            logging.error(
                f"Failed to query Brave Search with query `{q}`: {e}")
            logging.info("Trying with next browser backend.")

        try:
            logging.info(f"Querying Bing with query: `{q}`")
            res = await query_bing_search(pw_browser, q, params.n_results)
            results.extend(res)
            continue
        except Exception as e:
            logging.error(f"Failed to query Bing search with query `{q}`: {e}")
            logging.info("Trying with next browser backend.")

        # All backends failed for this query and nothing has been collected
        # yet; bail out instead of hammering rate-limited backends further.
        if len(results) == 0:
            return SerpResults(results=[], error="All backends are rate-limited.")

    return SerpResults(results=results, error=None)
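
# Hypothetical sketch of calling the fallback endpoint above; `error` is only
# non-null when every backend failed before any results were collected:
#
#   resp = httpx.post(
#       "http://localhost:7860/serp/search",
#       json={"queries": ["solid-state batteries"], "n_results": 10},
#   )
#   body = resp.json()
#   if body["error"] is None:
#       print(body["results"])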

# =========================== Scraping endpoints ===========================


# TODO: return a proper error response if the patent is not found or scraping fails
@scrap_router.get("/scrap_patent/{patent_id}")
async def scrap_patent(patent_id: str):
    """Scraps the specified patent from Google Patents."""
    try:
        patent = await scrap_patent_async(httpx_client, f"https://patents.google.com/patent/{patent_id}/en")
        return patent
    except Exception as e:
        logging.warning(f"Failed to scrap patent {patent_id}: {e}")
        return None
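
# Hypothetical sketch of fetching a single patent (the endpoint currently
# returns null on failure; see the TODO above):
#
#   resp = httpx.get("http://localhost:7860/scrap/scrap_patent/US9876543B2")
#   print(resp.json())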


class ScrapPatentsRequest(BaseModel):
    """Request model for scrapping multiple patents."""
    patent_ids: list[str] = Field(...,
                                  description="List of patent IDs to scrap")


@scrap_router.post("/scrap_patents_bulk", response_model=PatentScrapBulkResponse)
async def scrap_patents(params: ScrapPatentsRequest) -> PatentScrapBulkResponse:
    """Scraps multiple patents from Google Patents."""
    patents = await scrap_patent_bulk_async(httpx_client, params.patent_ids)
    return patents
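
# Example bulk request (a hypothetical sketch; the patent IDs shown follow the
# same Google Patents-style format used by the single-patent endpoint above):
#
#   resp = httpx.post(
#       "http://localhost:7860/scrap/scrap_patents_bulk",
#       json={"patent_ids": ["US9876543B2", "EP1234567A1"]},
#   )
#   print(resp.json())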

# ===============================================================================

app.include_router(serp_router)
app.include_router(scrap_router)

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)