import random
import threading

import requests

# Browser-mimicking request headers for the eastmoney.com quote endpoint.
# NOTE(review): the 'cookie' value is copied from one captured browser
# session and will eventually expire — refresh it if the API starts
# rejecting requests or returning empty payloads.
headers = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36',
    'cookie': 'ullscreengg=1; fullscreengg2=1; qgqp_b_id=fd5afabcdac9f80176bcfde2ce04e4d0; st_nvi=TBlxsQLCHlnp3rAYcDOY021ec; st_si=46120325553474; st_asi=delete; nid=01a3783fdb18a0d023efd07096e22b72; nid_create_time=1762236218405; gvi=S22V2zW47amqiuHwZ-7UO45bd; gvi_create_time=1762236218405; st_pvi=41582380790477; st_sp=2025-11-04%2014%3A03%3A38; st_inirUrl=; st_sn=7; st_psi=20251104154033656-113200301321-9532209340',
    'referer': 'https://quote.eastmoney.com/center/gridlist.html'
}
# for page in range(1,274):
#     print(f'开始{page}页')
#     url = f'https://push2.eastmoney.com/api/qt/clist/get?np=1&fltt=1&invt=2&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3&pn={page}&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762242032855'
#     response = requests.get(url, headers=headers)
#     data = response.json()
#     for i in data['data']['diff']:
#         print(i['f14'])


# import asyncio
# import aiohttp
#
#
# async def fetch(session, url):
#     async with session.get(url) as response:
#         data = await response.json()
#         for i in data['data']['diff']:
#             print(i['f14'])
#
#
# async def main():
#     async with aiohttp.ClientSession(headers=headers) as session:
#         urls = [
#             f'https://push2.eastmoney.com/api/qt/clist/get?np=1&fltt=1&invt=2&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3&pn={page}&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762242032855'
#             for page in range(1, 274)]
#         for i in range(0, len(urls), 2):
#             batch = urls[i:i + 2]
#             tasks = [fetch(session, url) for url in batch]
#             await asyncio.gather(*tasks)
#             await asyncio.sleep(random.uniform(1, 2))
#
#
# asyncio.run(main())

import concurrent.futures

# One request URL per result page (pn=1 .. pn=273); every query parameter
# other than the page number is fixed.  The '{page}' slot is filled below.
_URL_TEMPLATE = 'https://push2.eastmoney.com/api/qt/clist/get?np=1&fltt=1&invt=2&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3&pn={page}&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762242032855'
urls = [_URL_TEMPLATE.format(page=page_number) for page_number in range(1, 274)]


def thread_fun2(url):
    """Fetch one listing page and print each entry's name (field 'f14').

    Args:
        url: Fully-built eastmoney clist endpoint URL for a single page.

    Raises:
        requests.HTTPError: if the server responds with a 4xx/5xx status.
        requests.Timeout: if the server does not respond within 10 seconds.
    """
    # Send the shared browser headers — the original call omitted them,
    # unlike every earlier (commented-out) version, which makes the request
    # easy for the endpoint to reject.  A timeout prevents a stalled server
    # from hanging a worker thread forever.
    resp = requests.get(url, headers=headers, timeout=10)
    resp.raise_for_status()
    payload = resp.json()
    # 'data' is null when the page is out of range or the request is
    # rejected; guard instead of crashing with TypeError on None['diff'].
    for entry in (payload.get('data') or {}).get('diff') or []:
        print(entry['f14'])


# Fan the page fetches out across a small worker pool.  The futures are
# collected and .result() is called on each as it completes, so that any
# exception raised inside thread_fun2 is re-raised here — bare
# executor.submit() would silently swallow worker exceptions.
with concurrent.futures.ThreadPoolExecutor(max_workers=6) as executor:
    futures = [executor.submit(thread_fun2, url) for url in urls]
    for future in concurrent.futures.as_completed(futures):
        future.result()
