import aiohttp
import asyncio
from fake_useragent import FakeUserAgent
from lxml import etree
import pymongo


# Random desktop User-Agent generator; one UA string is sampled below and
# reused for every request in this run.
ua = FakeUserAgent()

# Connect to the local MongoDB (default host/port).  The target database is
# dropped first so each run starts from a clean slate, then fresh handles to
# the database and collection are obtained.  NOTE: the statement order here
# matters — drop must happen before get_database.
client = pymongo.MongoClient()
client.drop_database("北京法院")
db = client.get_database("北京法院")
collection = db.get_collection("执行信息")

# Shared request headers: randomized UA plus the site's own referer so the
# server treats us like an in-site navigation.
headers = {
	"user-agent": ua.random,
	"referer": "https://www.bjcourt.gov.cn/"
}

# Work queue: 438 paginated listing URLs (page=1..438).  Workers pop() from
# the tail of this shared list until it is empty.
urls = [
	f"https://www.bjcourt.gov.cn/zxxx/indexOld.htm?st=1&zxxxlx=100013007&bzxrlx=&bzxrxm=&zrr=&frhqtzz=&jbfyId=&ah=&dqxh=26&page={page}"
	for page in range(1, 439)]


async def request(session):
	"""Worker loop: pop URLs off the shared ``urls`` list, scrape each
	listing page, and bulk-insert the parsed rows into MongoDB.

	Runs until the module-level ``urls`` list is empty.  Several workers may
	run this concurrently: ``list.pop()`` is atomic and asyncio is
	single-threaded, so no URL is fetched twice.

	:param session: an open ``aiohttp.ClientSession`` to reuse across pages.
	"""
	while urls:
		url = urls.pop()
		print(f"正在爬取{url}......")
		async with session.get(url, headers=headers) as res:
			# Don't shadow the response object with its body text.
			html = await res.text()
			tree = etree.HTML(html)
			if tree is None:
				# Empty / non-HTML body: skip this page instead of crashing
				# on tree.xpath with AttributeError.
				continue
			# All data rows of the listing table, skipping the header row.
			items = tree.xpath('//table[@class="table_list_02"]/tr[position()>1]')

			datas = []
			for item in items:
				tds = item.xpath('./td/text()')
				if len(tds) < 6:
					# Malformed/short row — previously only index 6 was
					# guarded, so tds[0..5] could raise IndexError.
					continue
				data = {
					"id": tds[0],
					"name": tds[1].strip(),
					"type": tds[2],
					"value": tds[3],
					"no": tds[4],
					"address": tds[5],
					# The last cell (date) is sometimes absent.
					"time": tds[6] if len(tds) > 6 else ''
				}
				datas.append(data)
			# insert_many([]) raises pymongo.errors.InvalidOperation — only
			# insert when at least one row was parsed.
			# NOTE: pymongo is a blocking driver; this call briefly stalls
			# the event loop.  Acceptable here for small batches.
			if datas:
				collection.insert_many(datas)



async def task():
	"""Run one crawler worker: open a dedicated HTTP session and let it
	drain the shared URL queue via ``request``."""
	async with aiohttp.ClientSession() as http:
		result = await request(http)
	return result


async def main():
	"""Launch ten concurrent workers over the shared URL list and wait for
	all of them to finish.  ``gather`` schedules each coroutine as a task."""
	workers = [task() for _ in range(10)]
	await asyncio.gather(*workers)



# Entry point: run the event loop until all workers have drained the queue.
asyncio.run(main())

# Release the MongoDB connection once scraping is complete.
client.close()





# Baseline (single-request) version, kept for reference:
# async def request(url):
# 	async with aiohttp.ClientSession() as session:
# 		async with session.get(url) as res:
# 			r = await res.text()
# 			return r
#
#
# async def main():
# 	url = "https://www.bjcourt.gov.cn/zxxx/indexOld.htm?st=1&zxxxlx=100013007&bzxrlx=&bzxrxm=&zrr=&frhqtzz=&jbfyId=&ah=&dqxh=26&page=438"
# 	rs = await asyncio.gather(*[asyncio.create_task(request(url))])
# 	for r in rs:
# 		print(r)
#
#
# asyncio.run(main())
