import time
import requests
import aiohttp
import asyncio
from fake_useragent import UserAgent
import concurrent.futures

# Xinfadi wholesale-market price endpoint; expects paginated POST form data.
url = 'http://www.xinfadi.com.cn/getPriceData.html'
headers = {
    # A fresh random User-Agent per run, presumably to look less like a bot.
    'user-agent': UserAgent().random,
}


# for page in range(1, 6181):
#     data = {
#         'limit': 20,
#         'current': page,
#         'pubDateStartTime': '2023/01/01'
#     }
#     response = requests.post(url, data=data)
#     items = response.json()
#     for i in items['list']:
#         print(i)
#     time.sleep(1)


def make_data(page):
    """Return the POST form payload requesting page *page* of price data."""
    return dict(limit=20, current=page, pubDateStartTime='2023/01/01')


async def fetch(session, data):
    """POST one page's form payload and print every row of the JSON response.

    :param session: an open aiohttp.ClientSession.
    :param data: form dict built by make_data(); data['current'] is the page number.
    """
    # Send the module-level browser headers so this async path matches the
    # synchronous requests-based fetch further down the file.
    async with session.post(url, data=data, headers=headers) as response:
        # Double quotes inside the f-string: the original single quotes were a
        # SyntaxError on every Python release before 3.12 (PEP 701).
        print(f'开始{data["current"]}页')
        payload = await response.json()  # new name: don't shadow the `data` argument
        for row in payload['list']:
            print(row)


async def main():
    """Scrape pages 1..6180, firing 3 concurrent requests per batch with a
    one-second pause between batches."""
    batch_size = 3
    payloads = [make_data(page_no) for page_no in range(1, 6181)]
    async with aiohttp.ClientSession() as session:
        for start in range(0, len(payloads), batch_size):
            chunk = payloads[start:start + batch_size]
            await asyncio.gather(*(fetch(session, payload) for payload in chunk))
            await asyncio.sleep(1)


# First pass: run the asyncio/aiohttp scrape to completion.
asyncio.run(main())

# Rebuild the full list of form payloads for the thread-pool pass below.
# NOTE(review): this makes the script scrape the same 6180 pages a second
# time via requests + threads — confirm both passes are intended.
datas = [make_data(page) for page in range(1, 6181)]


def fetch(data):
    """Synchronous variant for the thread pool: POST one page and print its rows.

    NOTE(review): this deliberately re-binds the module-level name ``fetch``
    (replacing the async version) before the ThreadPoolExecutor section runs.

    :param data: form dict built by make_data().
    """
    # timeout added: requests waits indefinitely by default, so one hung
    # connection would otherwise stall a worker thread forever.
    resp = requests.post(url, headers=headers, data=data, timeout=10)
    payload = resp.json()  # new name: don't shadow the `data` argument
    for row in payload['list']:
        print(row)


# Second pass: fan the same payloads out across 10 worker threads.
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
    for payload in datas:
        pool.submit(fetch, payload)
