# import requests
# from util.headers import get_headers_init_user_agent, get_headers_init_referer
#
# headers = get_headers_init_user_agent()
# headers.update(get_headers_init_referer('https://quote.eastmoney.com/center/gridlist.html'))
# headers.update({
#     'cookie': 'fullscreengg=1; fullscreengg2=1; qgqp_b_id=9703aaa1db33faa89ce40234dd1c5f5c; st_nvi=iA0FxtPhqUB5iga8Jh0Ib4f26; st_si=37633731493983; st_asi=delete; nid=0482c65e4b4f2fe15e5e5b5d38746e5c; nid_create_time=1762235711082; gvi=NedW_BUUygi62shh9H8sZ52d6; gvi_create_time=1762235711082; st_pvi=36186767154013; st_sp=2025-11-04%2013%3A55%3A10; st_inirUrl=; st_sn=13; st_psi=20251104140715680-113200301321-2759944614'
# })
#
# for page in range(1, 274):
#     print(f"正在爬取第{page}页")
#     url = f'https://push2.eastmoney.com/api/qt/clist/get?np=1&fltt=1&invt=2&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3&pn={page}&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762236174807'
#     res = requests.get(url, headers=headers)
#     data = res.json()
#     # print(data)
#     for item in data['data']['diff']:
#         print(item['f14'])
import threading

import asyncio
import aiohttp
from util.getheaders import get_init_headers, get_headers_init_referer

# Build the shared request headers once at import time: a browser-like
# User-Agent, the eastmoney grid-list page as Referer, and a captured
# session cookie so the push2 API accepts the requests.
headers = get_init_headers()
referer_headers = get_headers_init_referer('https://quote.eastmoney.com/center/gridlist.html')
headers.update(referer_headers)
headers['cookie'] = 'fullscreengg=1; fullscreengg2=1; qgqp_b_id=9703aaa1db33faa89ce40234dd1c5f5c; st_nvi=iA0FxtPhqUB5iga8Jh0Ib4f26; st_si=37633731493983; st_asi=delete; nid=0482c65e4b4f2fe15e5e5b5d38746e5c; nid_create_time=1762235711082; gvi=NedW_BUUygi62shh9H8sZ52d6; gvi_create_time=1762235711082; st_pvi=36186767154013; st_sp=2025-11-04%2013%3A55%3A10; st_inirUrl=; st_sn=13; st_psi=20251104140715680-113200301321-2759944614'


async def fetch(session, url):
    """Fetch *url* through the shared aiohttp session and return its JSON body.

    :param session: an ``aiohttp.ClientSession`` (or any object whose ``get``
        returns an async context manager with an awaitable ``json()``)
    :param url: the request URL
    :return: the decoded JSON payload from ``response.json()``
    """
    async with session.get(url) as response:
        # NOTE(review): no status check — a non-2xx response would still be
        # json()-decoded here; consider response.raise_for_status() upstream.
        return await response.json()


async def main():
    """Crawl the eastmoney stock-list API in small concurrent batches.

    Builds one URL per result page, fetches ``batch_size`` pages at a time
    with :func:`asyncio.gather`, prints each batch of JSON payloads, and
    pauses one second between batches to avoid hammering the endpoint.
    """
    async with aiohttp.ClientSession(headers=headers) as session:
        # One URL per page. range(1, 3) limits this to 2 pages for testing;
        # the full listing spans ~273 pages (see the commented requests
        # version at the top of the file).
        urls = [f'https://push2.eastmoney.com/api/qt/clist/get?np=1&fltt=1&invt=2&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3&pn={page}&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762236174807' for page in range(1, 3)]

        batch_size = 2  # pages fetched concurrently per batch
        for i in range(0, len(urls), batch_size):
            batch = urls[i:i + batch_size]
            tasks = [fetch(session, url) for url in batch]
            datas = await asyncio.gather(*tasks)
            print(datas)
            print(len(datas))
            # Throttle between batches only — no pointless sleep after the last one.
            if i + batch_size < len(urls):
                await asyncio.sleep(1)
asyncio.run(main())




#
# import threading
#
# def t_main():
#     print(f'线程名: {threading.current_thread().name}')
#
# def main():
#     threads = []
#     for i in range(274):
#         t = threading.Thread(target=t_main, name=f'thread_{i}')
#         threads.append(t)
#         t.start()
#
#     for t in threads:
#         t.join()
#
# if __name__ == '__main__':
#     main()




# import requests
# import concurrent.futures
# import os
# from util.getheaders import get_init_headers, get_headers_init_referer
#
# headers = get_init_headers()
# headers.update(get_headers_init_referer('https://quote.eastmoney.com/center/gridlist.html'))
# headers.update({
#     'cookie': 'fullscreengg=1; fullscreengg2=1; qgqp_b_id=9703aaa1db33faa89ce40234dd1c5f5c; st_nvi=iA0FxtPhqUB5iga8Jh0Ib4f26; st_si=37633731493983; st_asi=delete; nid=0482c65e4b4f2fe15e5e5b5d38746e5c; nid_create_time=1762235711082; gvi=NedW_BUUygi62shh9H8sZ52d6; gvi_create_time=1762235711082; st_pvi=36186767154013; st_sp=2025-11-04%2013%3A55%3A10; st_inirUrl=; st_sn=13; st_psi=20251104140715680-113200301321-2759944614'
# })
#
#
# urls = [f'https://push2.eastmoney.com/api/qt/clist/get?np=1&fltt=1&invt=2&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3&pn={page}&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762236174807' for page in range(1, 274)]
#
#
# def fetch(url):
#     # return f'线程id：{threading.current_thread().ident}--- {url}',
#     res = requests.get(url, headers=headers)
#     print(url, res.json())
#     # return url, res.json()
#
#
# with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
#     for url in urls:
#         executor.submit(fetch, url)

    # futures = [executor.submit(fetch, url) for url in urls]
    # for future in concurrent.futures.as_completed(futures):
    #     print(future.result())