garvitcpp commited on
Commit
612285a
·
verified ·
1 Parent(s): 247e891

Create browser_utils.py

Browse files
Files changed (1) hide show
  1. services/utils/browser_utils.py +126 -0
services/utils/browser_utils.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from playwright.async_api import async_playwright
2
+ import asyncio
3
+ import random
4
+ import logging
5
+ from typing import Optional, List
6
+
7
logger = logging.getLogger(__name__)

# WebShare rotating proxy pool; each entry is "ip:port:username:password".
# SECURITY NOTE(review): proxy credentials are hard-coded in source control.
# Move them to environment variables or a secrets manager before shipping.
WEBSHARE_PROXIES = [
    "198.23.239.134:6540:zvubytfw:ak6yit5k2tvj",
    "207.244.217.165:6712:zvubytfw:ak6yit5k2tvj",
    "107.172.163.27:6543:zvubytfw:ak6yit5k2tvj",
    "161.123.152.115:6360:zvubytfw:ak6yit5k2tvj",
    "23.94.138.75:6349:zvubytfw:ak6yit5k2tvj",
    "216.10.27.159:6837:zvubytfw:ak6yit5k2tvj",
    "136.0.207.84:6661:zvubytfw:ak6yit5k2tvj",
    "64.64.118.149:6732:zvubytfw:ak6yit5k2tvj",
    "142.147.128.93:6593:zvubytfw:ak6yit5k2tvj",
    "154.36.110.199:6853:zvubytfw:ak6yit5k2tvj"
]

# Consecutive failure count per proxy, keyed by the raw proxy string.
proxy_failures = {}

def get_random_proxy() -> str:
    """Return one random proxy string, skipping proxies with 3+ failures.

    BUGFIX: the return annotation was ``List[str]`` but ``random.choice``
    returns a single element; corrected to ``str``.

    Returns:
        A proxy string in ``ip:port:username:password`` form. If every proxy
        has reached the failure threshold, all counts are reset so the full
        pool becomes eligible again rather than raising on an empty choice.
    """
    available_proxies = [p for p in WEBSHARE_PROXIES if proxy_failures.get(p, 0) < 3]
    if not available_proxies:
        # Every proxy is marked bad; clear the slate instead of failing hard.
        for proxy in WEBSHARE_PROXIES:
            proxy_failures[proxy] = 0
        available_proxies = WEBSHARE_PROXIES

    return random.choice(available_proxies)
36
+
37
# Strong references to in-flight cooldown tasks. asyncio.create_task keeps
# only a weak reference to its result, so an un-referenced task can be
# garbage-collected before it ever runs (documented asyncio pitfall).
_proxy_reset_tasks = set()

def mark_proxy_failure(proxy_str: str) -> None:
    """Record one failure for *proxy_str*; at 3 failures start a 5-minute cooldown.

    Must be called while an event loop is running once the threshold is hit,
    because the cooldown is scheduled with ``asyncio.create_task``.

    Args:
        proxy_str: Raw proxy string (``ip:port:username:password``) as used
            as a key in ``proxy_failures``.
    """
    proxy_failures[proxy_str] = proxy_failures.get(proxy_str, 0) + 1
    logger.warning(f"Marked proxy as failed: {proxy_str} (failure count: {proxy_failures[proxy_str]})")

    if proxy_failures[proxy_str] >= 3:
        logger.warning(f"Proxy {proxy_str} has failed multiple times, will not use for 5 minutes")
        # BUGFIX: retain the task so it is not garbage-collected mid-flight;
        # drop the reference automatically once it completes.
        task = asyncio.create_task(reset_proxy_after_delay(proxy_str))
        _proxy_reset_tasks.add(task)
        task.add_done_callback(_proxy_reset_tasks.discard)
45
+
46
async def reset_proxy_after_delay(proxy_str: str) -> None:
    """Clear a proxy's recorded failure count after a five-minute cooldown.

    Args:
        proxy_str: Raw proxy string used as the key in ``proxy_failures``.
    """
    cooldown_seconds = 300  # 5 minutes
    await asyncio.sleep(cooldown_seconds)
    # Guard clause: the proxy may never have been tracked at all.
    if proxy_str not in proxy_failures:
        return
    proxy_failures[proxy_str] = 0
    logger.info(f"Reset failure count for proxy: {proxy_str}")
52
+
53
async def fetch_page_with_browser(url: str, user_agent: str) -> Optional[str]:
    """Fetch a page using Playwright with a proxy.

    Launches headless Chromium through a randomly chosen WebShare proxy,
    navigates to *url*, and returns the page HTML. Retries once with a
    different proxy on failure.

    Args:
        url: Target URL. If it contains "search" (or "searchresults"), the
            HTML is additionally validated for property-listing selectors —
            these look Booking.com-specific (TODO confirm against callers).
        user_agent: User-Agent string applied to the browser context.

    Returns:
        The page HTML as a string, or ``None`` when both attempts fail to
        produce usable content.
    """
    logger.info(f"Requesting URL with browser: {url}")

    # Try up to 2 different proxies
    for attempt in range(2):
        proxy_str = get_random_proxy()
        # Proxy strings are "ip:port:username:password".
        ip, port, username, password = proxy_str.split(':')

        logger.info(f"Using proxy {ip}:{port} (attempt {attempt+1})")

        try:
            async with async_playwright() as p:
                browser = await p.chromium.launch(
                    headless=True,
                    proxy={
                        "server": f"http://{ip}:{port}",
                        "username": username,
                        "password": password
                    }
                )

                # Create context with realistic settings
                context = await browser.new_context(
                    viewport={"width": 1920, "height": 1080},
                    user_agent=user_agent
                )

                # Apply stealth mode: hide navigator.webdriver, the first
                # flag most bot-detection scripts inspect.
                await context.add_init_script("""
                    Object.defineProperty(navigator, 'webdriver', {
                        get: () => false,
                    });
                """)

                # Create page and navigate
                page = await context.new_page()
                response = await page.goto(url, wait_until="networkidle", timeout=30000)

                # 202 is accepted alongside 200 — presumably some target
                # endpoints reply 202 with a usable body; verify if needed.
                if response and response.status in [200, 202]:
                    # Wait a bit for any dynamic content to load
                    await asyncio.sleep(3)

                    # Get the page HTML
                    html = await page.content()

                    # Heuristic sanity check: non-trivial length and an HTML
                    # skeleton, to reject proxy error/interstitial pages.
                    if len(html) > 5000 and ("<html" in html or "<!DOCTYPE" in html):
                        # NOTE(review): "searchresults" in url is redundant —
                        # "search" in url already matches it.
                        if "searchresults" in url or "search" in url:
                            # For search pages, ensure we have property listings
                            await page.wait_for_timeout(2000)  # Wait a bit longer for search results
                            has_results = await page.query_selector("[data-testid='property-card'], .sr_property_block, .sr_item")
                            if has_results:
                                logger.info(f"Successfully retrieved search results ({len(html)} bytes)")
                                await browser.close()
                                return html
                            else:
                                # Falls through to close + retry without
                                # marking the proxy failed (content issue,
                                # not a proxy issue).
                                logger.warning("No property cards found in search results")
                        else:
                            # For hotel detail pages, just return the content
                            logger.info(f"Successfully retrieved content ({len(html)} bytes)")
                            await browser.close()
                            return html

                # Unusable response: close this browser and try next proxy.
                # (On exception, exiting the async_playwright context tears
                # the browser down instead.)
                await browser.close()
        except Exception as e:
            logger.error(f"Browser request failed: {str(e)}")
            mark_proxy_failure(proxy_str)

        # Wait before trying another proxy
        # NOTE(review): this also runs after the final attempt, adding 2 s
        # before returning None.
        await asyncio.sleep(2)

    logger.error("All browser attempts failed")
    return None