import requests
from bs4 import BeautifulSoup
import aiohttp
import asyncio

def fetch_proxies():
    """Scrape the xicidaili free-proxy listing and return proxy URLs.

    Returns:
        list[str]: proxies formatted as "<protocol>://<ip>:<port>".

    Raises:
        requests.RequestException: on network failure, timeout, or an
            HTTP error status from the listing page.
    """
    url = "https://www.xicidaili.com/nn/"
    headers = {'User-Agent': 'Mozilla/5.0'}

    # Timeout prevents the script from hanging forever on a dead host.
    response = requests.get(url, headers=headers, timeout=10)
    # Fail loudly on 4xx/5xx instead of silently parsing an error page.
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'lxml')

    proxies = []
    # The first row of #ip_list is the table header — skip it.
    for row in soup.select('#ip_list tr')[1:]:
        cells = row.find_all('td')
        if len(cells) >= 6:
            # .strip() guards against stray whitespace/newlines in cell text,
            # which would otherwise corrupt the assembled proxy URL.
            ip = cells[1].text.strip()
            port = cells[2].text.strip()
            protocol = cells[5].text.strip().lower()
            proxies.append(f"{protocol}://{ip}:{port}")

    return proxies

async def check_proxy(proxy):
    """Probe *proxy* by fetching httpbin's /ip endpoint through it.

    Args:
        proxy: proxy URL string, e.g. "http://1.2.3.4:8080".

    Returns:
        bool: True on an HTTP 200 response, False on any other status,
        connection error, or timeout.
    """
    # ClientTimeout is the supported way to bound the whole request;
    # 5 seconds total keeps slow proxies from stalling the batch.
    timeout = aiohttp.ClientTimeout(total=5)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get('http://httpbin.org/ip',
                                   proxy=proxy) as response:
                # Explicit bool: the original fell through and returned None
                # on non-200 responses.
                return response.status == 200
    except (aiohttp.ClientError, asyncio.TimeoutError):
        # Narrow except: a bare `except:` also swallowed CancelledError
        # and KeyboardInterrupt, breaking task cancellation.
        return False

async def validate_proxies(proxies):
    """Check all proxies concurrently and return only the working ones.

    Args:
        proxies: iterable of proxy URL strings.

    Returns:
        list[str]: the subset of *proxies* that passed check_proxy.
    """
    # Launch one check per proxy and wait for all of them at once.
    outcomes = await asyncio.gather(*(check_proxy(p) for p in proxies))

    working = []
    for candidate, ok in zip(proxies, outcomes):
        if ok:
            working.append(candidate)
    return working

if __name__ == '__main__':
    # Run the full pipeline: scrape candidates, validate them concurrently,
    # and print the working proxies (the original discarded the result,
    # so the script produced no observable output).
    scraped = fetch_proxies()
    working = asyncio.run(validate_proxies(scraped))
    print('\n'.join(working))