import aiohttp
 
async def fetch_data(url, headers=None, timeout=30):
    """Fetch *url* over HTTP and return the response body decoded as JSON.

    Args:
        url: Target URL to GET.
        headers: Optional dict of extra request headers (e.g. a rotating
            ``User-Agent``); ``None`` sends aiohttp's defaults, preserving
            the original behavior.
        timeout: Total request timeout in seconds (connection + read).

    Returns:
        The JSON-decoded response payload.

    Raises:
        aiohttp.ClientResponseError: If the server answers with a 4xx/5xx
            status (raised before trying to parse an error page as JSON).
        aiohttp.ClientError: On connection-level failures.
        asyncio.TimeoutError: If the total timeout elapses.
    """
    # Bound the whole request so a stalled server cannot hang the coroutine
    # for aiohttp's much longer default timeout.
    client_timeout = aiohttp.ClientTimeout(total=timeout)
    async with aiohttp.ClientSession(timeout=client_timeout) as session:
        async with session.get(url, headers=headers) as response:
            # Fail fast on HTTP errors instead of attempting to JSON-decode
            # an HTML error page, which would raise a confusing parse error.
            response.raise_for_status()
            return await response.json()



import asyncio
import aiohttp
from aiohttp_socks import ProxyConnector
import random

# Target listing pages to scrape.
# NOTE(review): both entries are the identical Anjuke URL — presumably a
# placeholder; confirm the intended target list before running.
urls = ["https://chengdu.anjuke.com/sale/?from=HomePage_TopBar", "https://chengdu.anjuke.com/sale/?from=HomePage_TopBar"]
# SOCKS5 proxy endpoints in user:pass@host:port form, matching the
# aiohttp_socks.ProxyConnector import above.
# NOTE(review): these look like provider sample credentials (16yun demo
# values and a literal "username:password@host3:port3") — replace with real
# proxies before use.
proxies = ["socks5://16yun:16ip@www.16yun.cn:8888", "socks5://16yun:16ip@www.16yun.cn:11111", "socks5://username:password@host3:port3"]

# Desktop (Windows, macOS) and mobile (iPhone) User-Agent strings,
# presumably intended for per-request rotation via the `random` import —
# no use is visible in this chunk; confirm against the rest of the file.
user_agents = [
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 11_6_1) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.2 Safari/605.1.15",
"Mozilla/5.0 (iPhone; CPU iPhone OS 15_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15 Mobile/15E148 Safari/604.1"
]

async def main():
    """Fetch every URL in ``urls`` concurrently and return the results.

    Builds one task per target URL so the requests run in parallel, then
    awaits them all.

    Returns:
        A list, in the same order as ``urls``, of JSON payloads — or the
        exception a given request raised (``return_exceptions=True``).
    """
    # One concurrent fetch task per target page. The original called an
    # undefined `fetch`; the coroutine defined in this file is `fetch_data`.
    tasks = [asyncio.create_task(fetch_data(url)) for url in urls]
    # The original created the tasks but never awaited them, so no request
    # ever completed. gather() awaits them all; return_exceptions=True keeps
    # one failed request from cancelling the whole batch.
    return await asyncio.gather(*tasks, return_exceptions=True)


# Standard entry-point guard (the original `if name == "main":` compared two
# undefined names and could never run).
if __name__ == "__main__":
    asyncio.run(main())