Implement rate limiting and error handling in FastAPI application; update requirements.txt to include 'limits'
- app.py +21 -1
- requirements.txt +2 -1
app.py CHANGED

@@ -14,14 +14,34 @@ export SERPER_API_KEY="YOUR-KEY-HERE"
 import os, json, asyncio, httpx, trafilatura, gradio as gr
 from dateutil import parser as dateparser
 from pathlib import Path
+from limits import RateLimitItem, parse
+from limits.aio.storage import MemoryStorage
+from limits.aio.strategies import MovingWindowRateLimiter
+from fastapi import FastAPI, Request, HTTPException
+from fastapi.responses import JSONResponse
 
 SERPER_API_KEY = os.getenv("SERPER_API_KEY")
 SERPER_ENDPOINT = "https://google.serper.dev/news"
 HEADERS = {"X-API-KEY": SERPER_API_KEY, "Content-Type": "application/json"}
 
+# Rate limiting
+app = FastAPI()
+storage = MemoryStorage()
+limiter = MovingWindowRateLimiter(storage)
+rate_limit = parse("200/hour")
+
+
+@app.exception_handler(HTTPException)
+async def http_exception_handler(request: Request, exc: HTTPException):
+    return JSONResponse(status_code=exc.status_code, content={"message": exc.detail})
+
 
 ### 1 ─ Serper call -------------------------------------------------------------
+@app.post("/serper-news")
 async def get_serper_news(query: str, num: int = 4) -> list[dict]:
+    if not await limiter.hit(rate_limit, "global"):
+        raise HTTPException(status_code=429, detail="Too Many Requests")
+
     payload = {"q": query, "type": "news", "num": num, "page": 1}
     async with httpx.AsyncClient(timeout=15) as client:
         resp = await client.post(SERPER_ENDPOINT, headers=HEADERS, json=payload)
@@ -30,7 +50,7 @@ async def get_serper_news(query: str, num: int = 4) -> list[dict]:
 
 
 ### 2 ─ Concurrent HTML downloads ----------------------------------------------
-async def fetch_html_many(urls: list[str]) -> list[
+async def fetch_html_many(urls: list[str]) -> list[dict]:
     async with httpx.AsyncClient(timeout=20, follow_redirects=True) as client:
         tasks = [client.get(u) for u in urls]
         responses = await asyncio.gather(*tasks, return_exceptions=True)
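The rate limiting added above uses the limits package's asyncio API: a moving-window strategy over in-process MemoryStorage, with every request counted against the single shared key "global". Below is a minimal standalone sketch of that mechanism, runnable outside the app; it assumes a limits version that ships the asyncio API under limits.aio (as the diff's imports do), and the 3/minute limit is a demo value in place of the app's 200/hour:

import asyncio

from limits import parse
from limits.aio.storage import MemoryStorage
from limits.aio.strategies import MovingWindowRateLimiter


async def main() -> None:
    limiter = MovingWindowRateLimiter(MemoryStorage())
    demo_limit = parse("3/minute")  # demo value; the app uses parse("200/hour")

    for i in range(5):
        # hit() records one event under the given key and returns False
        # once the moving window is full.
        allowed = await limiter.hit(demo_limit, "global")
        print(f"request {i + 1}: {'allowed' if allowed else 'rejected'}")
        # Expected: requests 1-3 allowed, 4-5 rejected.


asyncio.run(main())

Because the key is the literal string "global", all clients draw from one shared 200/hour bucket; keying on the caller instead (for example request.client.host) would give per-client quotas. Note also that MemoryStorage is per-process: counts reset on restart and are not shared across workers, which is fine for a single-process app but not for multi-worker deployments.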
requirements.txt CHANGED

@@ -1,4 +1,5 @@
 gradio
 httpx
 trafilatura
-python-dateutil
+python-dateutil
+limits
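The custom exception handler changes FastAPI's default error body from {"detail": ...} to {"message": ...}, so the 429 raised by the rate limiter comes back in that shape. A quick sanity check of the handler using FastAPI's TestClient (which runs on httpx, already in requirements.txt); the /boom route here is hypothetical, made up purely to raise the same HTTPException the rate limiter raises:

from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse
from fastapi.testclient import TestClient

app = FastAPI()


@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    # Mirrors the handler from the diff: flatten detail into a "message" key.
    return JSONResponse(status_code=exc.status_code, content={"message": exc.detail})


@app.get("/boom")
async def boom():
    raise HTTPException(status_code=429, detail="Too Many Requests")


client = TestClient(app)
resp = client.get("/boom")
print(resp.status_code, resp.json())  # expected: 429 {'message': 'Too Many Requests'}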