MB-IDK committed on
Commit
8c40d9f
·
verified ·
1 Parent(s): 02feb8e

Upload 4 files

Browse files
Files changed (4) hide show
  1. Dockerfile +103 -0
  2. README.md +4 -4
  3. app.py +566 -0
  4. requirements.txt +7 -0
Dockerfile ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# ╔══════════════════════════════════════════════════════════════╗
# ║ HIBP Pro Monitor — GOD TIER Dockerfile ║
# ║ Camoufox + Xvfb + FastAPI on HuggingFace Spaces ║
# ╚══════════════════════════════════════════════════════════════╝

FROM python:3.12-slim

# ──────────────────────────────────────────────
# 1. System dependencies for Firefox/Camoufox
# ──────────────────────────────────────────────
# Camoufox ships a modified Firefox — it needs the GTK, audio and X11 libs,
# plus Xvfb for headless="virtual" mode (stealthier than headless=True)
# and D-Bus to avoid Firefox session errors inside a container.
RUN apt-get update && apt-get install -y --no-install-recommends \
    # === Core Firefox runtime dependencies ===
    libgtk-3-0 \
    libasound2 \
    libx11-xcb1 \
    libdbus-glib-1-2 \
    libdbus-1-3 \
    libxt6 \
    libxrender1 \
    libxcomposite1 \
    libxdamage1 \
    libxrandr2 \
    libxcursor1 \
    libxi6 \
    libxtst6 \
    libpango-1.0-0 \
    libcairo2 \
    libcairo-gobject2 \
    libgdk-pixbuf2.0-0 \
    libglib2.0-0 \
    libfontconfig1 \
    libfreetype6 \
    libstdc++6 \
    # === Virtual display for headless="virtual" ===
    xvfb \
    # === D-Bus (prevents Firefox errors in containers) ===
    dbus \
    dbus-x11 \
    # === Fonts (so pages render correctly) ===
    fonts-liberation \
    fonts-noto-core \
    # === Utils ===
    ca-certificates \
    curl \
    && rm -rf /var/lib/apt/lists/* \
    && apt-get clean

# ──────────────────────────────────────────────
# 2. D-Bus setup (avoids Firefox crashes in Docker)
# ──────────────────────────────────────────────
RUN mkdir -p /run/dbus && dbus-uuidgen > /etc/machine-id

# ──────────────────────────────────────────────
# 3. Create a non-root user (required by HF Spaces)
# ──────────────────────────────────────────────
RUN useradd -m -u 1000 user
WORKDIR /app

# ──────────────────────────────────────────────
# 4. Python install + Camoufox browser
# ──────────────────────────────────────────────
COPY --chown=user requirements.txt .
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Fetch the Camoufox browser binary + fingerprint data + addons.
# This downloads ~120MB into ~/.cache/camoufox/.
RUN python -m camoufox fetch

# ──────────────────────────────────────────────
# 5. Copy the application code
# ──────────────────────────────────────────────
COPY --chown=user . /app

# ──────────────────────────────────────────────
# 6. Permissions for the non-root user
# ──────────────────────────────────────────────
# The Camoufox cache lands in /root/.cache when fetched as root — copy it
# for the runtime user. FIX: create /home/user/.cache first; without it
# `cp -r` fails (silently, because of `|| true`) and the browser cache is
# missing at runtime. `useradd -m` creates /home/user but not .cache.
RUN mkdir -p /home/user/.cache && \
    cp -r /root/.cache/camoufox /home/user/.cache/camoufox 2>/dev/null || true && \
    chown -R user:user /home/user/.cache 2>/dev/null || true && \
    # Xvfb needs a world-writable, sticky /tmp
    chmod 1777 /tmp

# ──────────────────────────────────────────────
# 7. Entrypoint
# ──────────────────────────────────────────────
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    # Avoid buffered output / .pyc noise in containers
    PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    # Camoufox will look for its cache here
    XDG_CACHE_HOME=/home/user/.cache

EXPOSE 7860

# Launch under dbus-run-session so Firefox has a D-Bus bus.
CMD ["dbus-run-session", "python", "app.py"]
README.md CHANGED
@@ -1,8 +1,8 @@
1
  ---
2
- title: HIBP2
3
- emoji: 🦀
4
- colorFrom: red
5
- colorTo: pink
6
  sdk: docker
7
  pinned: false
8
  ---
 
1
  ---
2
+ title: HIBP
3
+ emoji: 🛡️
4
+ colorFrom: gray
5
+ colorTo: red
6
  sdk: docker
7
  pinned: false
8
  ---
app.py ADDED
@@ -0,0 +1,566 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ╔══════════════════════════════════════════════════════════════════╗
3
+ ║ HIBP PRO MONITOR — GOD TIER EDITION ║
4
+ ║ Camoufox + Async + Smart Proxy Pool + Stealth Maximum ║
5
+ ╚══════════════════════════════════════════════════════════════════╝
6
+ """
7
+
8
import asyncio
import hashlib
import hmac
import logging
import os
import random
import time
from contextlib import asynccontextmanager
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional

import aiohttp
import gradio as gr
import nest_asyncio
import uvicorn
from camoufox.async_api import AsyncCamoufox
from fastapi import FastAPI, Body, HTTPException, Header, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
27
+
28
+ nest_asyncio.apply()
29
+
30
# ================================================================
# LOGGING — Clean & structured
# ================================================================
# Single console handler; lines look like "HH:MM:SS │ LEVEL   │ message".
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s │ %(levelname)-7s │ %(message)s",
    datefmt="%H:%M:%S"
)
# Module-wide logger used by every component below.
log = logging.getLogger("hibp")
39
+
40
+ # ================================================================
41
+ # CONFIGURATION
42
+ # ================================================================
43
+
44
class Config:
    """Central, module-level configuration.

    The API key can be overridden via the HIBP_API_KEY environment variable;
    the hard-coded default is only a placeholder and must be changed in
    production.
    """

    # --- API ---
    API_KEY: str = os.environ.get("HIBP_API_KEY", "CHANGE_ME_TO_A_STRONG_SECRET_KEY")
    HOST: str = "0.0.0.0"
    PORT: int = 7860

    # --- Proxy ---
    PROXY_API_BASE: str = "https://voxxium-proxpy.hf.space"
    PROXY_FETCH_LIMIT: int = 80
    PROXY_TEST_TIMEOUT: float = 6.0
    PROXY_CACHE_TTL: int = 180            # seconds before the pool is refreshed
    PROXY_MAX_CONCURRENT_TESTS: int = 50

    # --- Scraper ---
    MAX_RETRIES: int = 8                  # attempts per email
    MAX_CONCURRENT_EMAILS: int = 3        # emails scraped in parallel
    PAGE_TIMEOUT: int = 35_000            # ms
    NAVIGATION_TIMEOUT: int = 45_000      # ms

    # --- Rate Limiting ---
    RATE_LIMIT_WINDOW: int = 60           # seconds
    RATE_LIMIT_MAX: int = 30              # max requests per window
66
+
67
+
68
+ # ================================================================
69
+ # RATE LIMITER — In-memory sliding window
70
+ # ================================================================
71
+
72
class RateLimiter:
    """In-memory sliding-window rate limiter keyed by an arbitrary string
    (here: the client IP)."""

    def __init__(self, window: int, max_requests: int):
        self.window = window                # window length, seconds
        self.max_requests = max_requests    # max hits allowed per window
        self._requests: Dict[str, List[float]] = {}

    def is_allowed(self, key: str) -> bool:
        """Record a hit for *key*; return True while it stays under the limit.

        Timestamps older than the window are pruned on every call.
        """
        now = time.time()
        cutoff = now - self.window
        recent = [ts for ts in self._requests.get(key, []) if ts > cutoff]
        allowed = len(recent) < self.max_requests
        if allowed:
            recent.append(now)
        self._requests[key] = recent
        return allowed
90
+
91
+
92
+ # ================================================================
93
+ # PROXY POOL — Async, cached, sorted by latency
94
+ # ================================================================
95
+
96
@dataclass
class ProxyPool:
    """Async proxy pool: fetched from a remote API, latency-tested in
    parallel, cached with a TTL, and served sorted fastest-first."""

    _proxies: List[Dict[str, Any]] = field(default_factory=list)   # sorted by latency
    _last_refresh: float = 0.0                                     # epoch of last refresh
    _lock: asyncio.Lock = field(default_factory=asyncio.Lock)      # serializes refresh()

    @property
    def is_stale(self) -> bool:
        """True when the cached pool is older than Config.PROXY_CACHE_TTL."""
        return time.time() - self._last_refresh > Config.PROXY_CACHE_TTL

    @property
    def urls(self) -> List[str]:
        """Proxy URLs, fastest first."""
        return [p["url"] for p in self._proxies]

    async def _test_single(
        self, session: aiohttp.ClientSession, proxy_url: str
    ) -> Optional[Dict]:
        """Probe one proxy; return {"url", "latency"} on success, None on any failure."""
        try:
            start = time.monotonic()
            async with session.get(
                "https://api.ipify.org?format=json",
                proxy=proxy_url,
                timeout=aiohttp.ClientTimeout(total=Config.PROXY_TEST_TIMEOUT),
                # NOTE(review): ssl=False skips certificate verification. Tolerable
                # here because we only measure reachability/latency — never send
                # credentials through this request.
                ssl=False,
            ) as resp:
                if resp.status == 200:
                    latency = round(time.monotonic() - start, 3)
                    return {"url": proxy_url, "latency": latency}
        except Exception:
            return None

    async def refresh(self) -> List[str]:
        """Fetch and latency-test all proxies concurrently.

        Returns the updated URL list; on failure, returns whatever is
        currently cached (possibly stale or empty).
        """
        async with self._lock:
            # Another caller may have refreshed while we waited on the lock.
            if not self.is_stale and self._proxies:
                return self.urls

            log.info("🔄 Proxy pool refresh started...")

            try:
                async with aiohttp.ClientSession() as session:
                    # 1) Fetch the raw proxy list.
                    async with session.get(
                        f"{Config.PROXY_API_BASE}/all",
                        params={
                            "protocol": "http",
                            "verified": "true",
                            "limit": Config.PROXY_FETCH_LIMIT,
                        },
                        timeout=aiohttp.ClientTimeout(total=15),
                    ) as resp:
                        data = await resp.json()

                    # Accept either a bare list or {"proxies": [...]}, and any
                    # of the known URL field names.
                    items = data if isinstance(data, list) else data.get("proxies", [])
                    raw = []
                    for p in items:
                        url = p.get("proxy_url") or p.get("proxy") or p.get("url")
                        if url:
                            raw.append(url)

                    log.info(f" Fetched {len(raw)} raw proxies, testing...")

                    # 2) Test concurrently, bounded by a semaphore.
                    sem = asyncio.Semaphore(Config.PROXY_MAX_CONCURRENT_TESTS)

                    async def _bounded_test(proxy_url: str):
                        async with sem:
                            return await self._test_single(session, proxy_url)

                    results = await asyncio.gather(
                        *[_bounded_test(p) for p in raw],
                        return_exceptions=True,
                    )

                # isinstance(r, dict) already filters out both None results
                # and the exceptions returned by gather(return_exceptions=True).
                working = [r for r in results if isinstance(r, dict)]
                working.sort(key=lambda x: x["latency"])

                self._proxies = working
                self._last_refresh = time.time()

                if working:
                    log.info(f" ✅ {len(working)} working proxies (best: {working[0]['latency']}s)")
                else:
                    log.info(" ⚠️ No working proxies found")
                return self.urls

            except Exception as e:
                log.error(f" ❌ Proxy refresh failed: {e}")
                return self.urls  # fall back to the existing cache
183
+
184
+
185
+ # ================================================================
186
+ # HUMAN BEHAVIOR SIMULATION
187
+ # ================================================================
188
+
189
async def human_type(page, selector: str, text: str):
    """Type *text* into the first element matching *selector* like a human.

    Clicks at a random point inside the element's bounding box (falling back
    to a plain click when no box is available), then types character by
    character with a uniformly random per-keystroke delay and occasional
    longer micro-pauses.

    Note: the original version enumerated the characters but never used the
    index — and its comment claimed the speed ramps up, which the uniform
    delay does not do. The dead index and misleading comment are removed.
    """
    locator = page.locator(selector).first
    await locator.wait_for(state="visible", timeout=15_000)

    # Click with a small random offset inside the element.
    box = await locator.bounding_box()
    if box:
        x = box["x"] + box["width"] * random.uniform(0.2, 0.8)
        y = box["y"] + box["height"] * random.uniform(0.3, 0.7)
        await page.mouse.click(x, y)
    else:
        await locator.click()

    await asyncio.sleep(random.uniform(0.3, 0.8))

    for char in text:
        # Per-keystroke delay in milliseconds.
        base_delay = random.uniform(45, 140)

        # Occasional micro-pause, like a human stopping to think.
        if random.random() < 0.08:
            await asyncio.sleep(random.uniform(0.2, 0.6))

        await locator.type(char, delay=base_delay)
214
+
215
+
216
async def human_delay(min_s: float = 1.0, max_s: float = 3.0):
    """Sleep for a random, human-looking duration in [min_s, max_s] seconds."""
    pause = random.uniform(min_s, max_s)
    await asyncio.sleep(pause)
219
+
220
+
221
+ # ================================================================
222
+ # BREACH EXTRACTOR
223
+ # ================================================================
224
+
225
async def extract_breaches(page) -> List[Dict]:
    """Robustly extract breach entries from the HIBP results timeline.

    Returns a list of dicts with keys ``name``, ``date``, ``description`` and
    ``compromised``. A failure on one timeline item is logged and skipped; a
    failure to find the timeline at all yields an empty list.
    """
    breaches = []
    try:
        log.info(" 📋 Waiting for breach timeline...")
        await page.wait_for_selector(".timeline-item", timeout=20_000)
        await asyncio.sleep(1.5)  # let the timeline animation finish rendering

        items = await page.locator(".timeline-item").all()

        for item in items:
            try:
                # Fetch the four fields of one item concurrently; gather with
                # return_exceptions=True so one missing field never aborts the
                # item — the isinstance checks below turn failures into defaults.
                name_p = item.locator(".timeline-title h5").inner_text()
                date_p = item.locator(".timeline-date-text").all_inner_texts()
                desc_p = item.locator(".timeline-content p").first.inner_text(timeout=5_000)
                comp_p = item.locator(".timeline-details-list li").all_inner_texts()

                name, date_texts, desc, comp = await asyncio.gather(
                    name_p, date_p, desc_p, comp_p,
                    return_exceptions=True
                )

                breaches.append({
                    "name": name.strip() if isinstance(name, str) else "Unknown",
                    "date": " ".join(date_texts).strip() if isinstance(date_texts, list) else "",
                    "description": desc.strip() if isinstance(desc, str) else "",
                    "compromised": [
                        x.strip() for x in (comp if isinstance(comp, list) else []) if x.strip()
                    ],
                })
            except Exception as e:
                log.debug(f" ⚠️ Item extraction error: {e}")
                continue

        log.info(f" ✅ {len(breaches)} breaches extracted")

    except Exception as e:
        log.warning(f" ❌ Timeline extraction failed: {e}")

    return breaches
266
+
267
+
268
+ # ================================================================
269
+ # CORE SCRAPER ENGINE — Camoufox powered
270
+ # ================================================================
271
+
272
async def check_single_email(
    email: str,
    proxy_pool: ProxyPool,
    use_proxy: bool = True,
) -> Dict:
    """Check one email on HIBP with Camoufox, retrying across proxies.

    Up to Config.MAX_RETRIES attempts: a random sample of proxies first,
    always ending with a direct (proxyless) attempt. Returns a result dict;
    ``pwned`` is True/False on success and None when every attempt failed.

    Fix: removed the vestigial ``browser = None`` assignment — the variable
    was immediately rebound by ``async with ... as browser`` and never read
    outside it.
    """
    log.info(f"🔍 Checking: {email}")

    proxies = []
    if use_proxy:
        proxy_urls = await proxy_pool.refresh()
        proxies = random.sample(proxy_urls, min(Config.MAX_RETRIES - 1, len(proxy_urls)))

    # Always end with a direct attempt (no proxy).
    attempts = proxies + [None]

    for attempt_num, proxy_url in enumerate(attempts, 1):
        proxy_label = proxy_url or "DIRECT"
        log.info(f" [{attempt_num}/{len(attempts)}] via {proxy_label}")

        try:
            # ═══════════════════════════════════════
            # CAMOUFOX LAUNCH — maximum anti-detection
            # ═══════════════════════════════════════
            launch_kwargs = {
                "headless": True,
                "humanize": True,        # built-in natural mouse movement
                "block_images": True,    # saves bandwidth, speeds up loads
                "block_webrtc": True,    # prevents WebRTC IP leaks
                "os": ["windows", "macos"],  # realistic fingerprint (linux is rare in real traffic)
                "i_know_what_im_doing": True,
            }

            if proxy_url:
                launch_kwargs["proxy"] = {"server": proxy_url}

            async with AsyncCamoufox(**launch_kwargs) as browser:
                page = await browser.new_page()

                # Timeouts
                page.set_default_timeout(Config.PAGE_TIMEOUT)
                page.set_default_navigation_timeout(Config.NAVIGATION_TIMEOUT)

                # ── Block unnecessary resources for speed ──
                await page.route(
                    "**/*",
                    lambda route: (
                        route.abort()
                        if route.request.resource_type in ("image", "media", "font", "stylesheet")
                        else route.continue_()
                    ),
                )

                # ── Navigation ──
                await page.goto(
                    "https://haveibeenpwned.com/",
                    wait_until="domcontentloaded",
                )
                await human_delay(2.0, 4.5)

                # ── Human-like email entry ──
                await human_type(page, 'input[type="email"], #emailInput', email)
                await human_delay(0.5, 1.5)

                # ── Click the check button ──
                btn = page.locator('#checkButton, button[type="submit"]').first
                await btn.click()

                # ── Wait for either result banner to become visible ──
                await page.wait_for_selector(
                    '#email-result-good:not(.d-none), #email-result-bad:not(.d-none)',
                    timeout=40_000,
                )

                is_safe = await page.locator('#email-result-good:not(.d-none)').count() > 0

                if is_safe:
                    log.info(f" ✅ SAFE — {email}")
                    return {
                        "email": email,
                        "pwned": False,
                        "breach_count": 0,
                        "breaches": [],
                        "checked_at": datetime.utcnow().isoformat(),
                    }
                else:
                    breaches = await extract_breaches(page)
                    log.info(f" 🔴 PWNED — {email} ({len(breaches)} breaches)")
                    return {
                        "email": email,
                        "pwned": True,
                        "breach_count": len(breaches),
                        "breaches": breaches,
                        "checked_at": datetime.utcnow().isoformat(),
                    }

        except Exception as e:
            # Any failure (launch, navigation, selector timeout) rotates to
            # the next proxy; message truncated to keep logs readable.
            log.warning(f" ⚠️ Attempt {attempt_num} failed: {str(e)[:120]}")
            continue

    # Every attempt exhausted.
    log.error(f" 💀 ALL ATTEMPTS FAILED for {email}")
    return {
        "email": email,
        "pwned": None,
        "error": "All attempts failed",
        "checked_at": datetime.utcnow().isoformat(),
    }
384
+
385
+
386
+ # ================================================================
387
+ # APPLICATION SETUP
388
+ # ================================================================
389
+
390
# Module-level singletons shared by every request handler below.
proxy_pool = ProxyPool()
rate_limiter = RateLimiter(Config.RATE_LIMIT_WINDOW, Config.RATE_LIMIT_MAX)

@asynccontextmanager
async def lifespan(app: FastAPI):
    """Pre-warm proxy pool on startup; log on shutdown."""
    log.info("🚀 HIBP Pro Monitor — GOD TIER EDITION starting...")
    await proxy_pool.refresh()
    yield
    log.info("👋 Shutting down...")

app = FastAPI(
    title="HIBP Pro Monitor — GOD TIER",
    version="2.0.0",
    lifespan=lifespan,
)

# NOTE(review): wildcard CORS origins with an auth header is permissive —
# the X-Token check in verify_token remains the effective access gate.
# Consider restricting origins if this is hosted publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["POST", "GET"],
    allow_headers=["*"],
)
413
+
414
+
415
+ # ================================================================
416
+ # AUTH — Timing-safe token comparison
417
+ # ================================================================
418
+
419
def verify_token(token: Optional[str]) -> bool:
    """Timing-safe comparison of the supplied token against the API key."""
    if token:
        return hmac.compare_digest(token, Config.API_KEY)
    return False
423
+
424
+
425
+ # ================================================================
426
+ # API ENDPOINTS
427
+ # ================================================================
428
+
429
+ @app.post("/check")
430
+ async def api_check(
431
+ request: Request,
432
+ payload: dict = Body(...),
433
+ x_token: str = Header(None),
434
+ ):
435
+ # ── Auth ──
436
+ if not verify_token(x_token):
437
+ log.warning(f"🚫 Auth failed from {request.client.host}")
438
+ raise HTTPException(status_code=403, detail="Invalid or missing token")
439
+
440
+ # ── Rate Limiting ──
441
+ client_ip = request.client.host
442
+ if not rate_limiter.is_allowed(client_ip):
443
+ raise HTTPException(status_code=429, detail="Rate limit exceeded")
444
+
445
+ # ── Validation ──
446
+ emails = payload.get("emails", [])
447
+ use_proxy = payload.get("use_proxy", True)
448
+
449
+ if not emails:
450
+ raise HTTPException(status_code=400, detail="No emails provided")
451
+ if len(emails) > 20:
452
+ raise HTTPException(status_code=400, detail="Max 20 emails per request")
453
+
454
+ # ── Traitement avec semaphore pour parallélisme contrôlé ──
455
+ sem = asyncio.Semaphore(Config.MAX_CONCURRENT_EMAILS)
456
+
457
+ async def _bounded_check(email: str):
458
+ async with sem:
459
+ return await check_single_email(email, proxy_pool, use_proxy)
460
+
461
+ results = await asyncio.gather(
462
+ *[_bounded_check(e.strip()) for e in emails if e.strip()],
463
+ )
464
+
465
+ # Vérification des erreurs critiques
466
+ failed = [r for r in results if r.get("pwned") is None]
467
+
468
+ return JSONResponse(
469
+ content={
470
+ "results": results,
471
+ "total": len(results),
472
+ "failed": len(failed),
473
+ "timestamp": datetime.utcnow().isoformat(),
474
+ },
475
+ status_code=200 if not failed else 207, # 207 = Multi-Status
476
+ )
477
+
478
+
479
+ @app.get("/health")
480
+ async def health():
481
+ return {
482
+ "status": "operational",
483
+ "proxy_pool_size": len(proxy_pool._proxies),
484
+ "proxy_pool_age": round(time.time() - proxy_pool._last_refresh, 1),
485
+ "uptime": "ok",
486
+ }
487
+
488
+
489
+ @app.post("/proxies/refresh")
490
+ async def force_proxy_refresh(x_token: str = Header(None)):
491
+ if not verify_token(x_token):
492
+ raise HTTPException(status_code=403, detail="Unauthorized")
493
+ proxy_pool._last_refresh = 0 # Force stale
494
+ urls = await proxy_pool.refresh()
495
+ return {"refreshed": len(urls)}
496
+
497
+
498
+ # ================================================================
499
+ # GRADIO UI
500
+ # ================================================================
501
+
502
async def gradio_check(txt: str, use_proxies: bool) -> str:
    """Gradio callback: check up to 10 emails (one per line), return a report."""
    if not txt.strip():
        return "⚠️ Enter at least one email."

    emails = [e.strip() for e in txt.splitlines() if e.strip()]
    lines = []

    for email in emails[:10]:  # UI cap
        result = await check_single_email(email, proxy_pool, use_proxies)
        pwned = result.get("pwned")

        if pwned is None:
            lines.append(f"⚫ {email} — ERROR: {result.get('error')}")
        elif pwned:
            breach_names = ", ".join(b["name"] for b in result["breaches"][:5])
            lines.append(
                f"🔴 {email} — PWNED ({result['breach_count']} breaches: {breach_names})"
            )
        else:
            lines.append(f"🟢 {email} — SAFE ✅")

    return "\n".join(lines)
523
+
524
+
525
# ── Gradio UI definition; mounted onto the FastAPI app at "/" below ──
with gr.Blocks(title="HIBP Pro Monitor", theme=gr.themes.Soft()) as demo:
    gr.Markdown(
        """
        # 🛡️ HIBP Pro Monitor — GOD TIER
        **Camoufox-powered** breach detection with stealth anti-fingerprinting.
        """
    )

    with gr.Row():
        with gr.Column(scale=2):
            # Input side: emails, proxy toggle, submit button.
            emails_input = gr.Textbox(
                lines=6,
                label="📧 Emails (one per line)",
                placeholder="john@example.com\njane@test.com",
            )
            proxy_toggle = gr.Checkbox(label="🌐 Use Proxy Rotation", value=True)
            check_btn = gr.Button("🔍 Check Breaches", variant="primary", size="lg")

        with gr.Column(scale=3):
            # Output side: read-only results text.
            output = gr.Textbox(
                label="📊 Results",
                lines=10,
                interactive=False,
            )

    # Wire the button to the async checker defined above.
    check_btn.click(gradio_check, [emails_input, proxy_toggle], output)

# Mount the Gradio UI on the FastAPI app; API routes keep their own paths.
app = gr.mount_gradio_app(app, demo, path="/")
553
+
554
+
555
+ # ================================================================
556
+ # ENTRYPOINT
557
+ # ================================================================
558
+
559
+ if __name__ == "__main__":
560
+ uvicorn.run(
561
+ app,
562
+ host=Config.HOST,
563
+ port=Config.PORT,
564
+ log_level="info",
565
+ access_log=False, # On a notre propre logging
566
+ )
requirements.txt ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ fastapi
2
+ uvicorn[standard]
3
+ gradio
4
+ aiohttp
5
+ nest_asyncio
6
+ camoufox[geoip]
7
+ browserforge