import asyncio
import concurrent
import concurrent.futures
import json
import threading
import time

import aiohttp
import requests


# Browser-mimicking request headers captured from an Edge session on
# quote.eastmoney.com, sent with every API call so the push2 endpoint
# treats the script like the site's own web front end.
headers = {
    "Accept": "*/*",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Pragma": "no-cache",
    "Referer": "https://quote.eastmoney.com/center/gridlist.html",
    "Sec-Fetch-Dest": "script",
    "Sec-Fetch-Mode": "no-cors",
    "Sec-Fetch-Site": "same-site",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36 Edg/142.0.0.0",
    "sec-ch-ua": "\"Chromium\";v=\"142\", \"Microsoft Edge\";v=\"142\", \"Not_A Brand\";v=\"99\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\""
}
# Session cookies copied from the same browser session.
# NOTE(review): these are point-in-time values (st_*, nid, gvi, ...) and
# will likely expire — refresh them from the browser if requests start
# coming back empty or rejected.
cookies = {
    "fullscreengg": "1",
    "fullscreengg2": "1",
    "qgqp_b_id": "7bcdba56d53a2f98baaf0438a7035809",
    "st_nvi": "PPl7AH6V-VT1qFDyuavqp37f0",
    "st_si": "97807058336250",
    "st_asi": "delete",
    "nid": "02b649015c65d17cb4c2cb7f43485553",
    "nid_create_time": "1762236313470",
    "gvi": "kaMO25-NLSol8yJp-Q3j1a561",
    "gvi_create_time": "1762236313470",
    "st_pvi": "29344322964275",
    "st_sp": "2025-11-04%2014%3A05%3A12",
    "st_inirUrl": "",
    "st_sn": "30",
    "st_psi": "20251104141557932-113200301321-1709272268"
}




# all_stocks = []
#
# for page in range(1,273):
#     time.sleep(1)
#
#     url = (f"https://push2.eastmoney.com/api/qt/clist/get?np=1"
#            f"&fltt=1"
#            f"&invt=2"
#            f"&cb=jQuery37109998877634289544_1762236957607"
#            f"&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3"
#            f"&pn={page}"
#            f"&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b"
#            f"&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762236957609")
#
#
#     response = requests.get(url, headers=headers,cookies= cookies)
#     text  = response.text
#     # 去掉 JSONP 包装
#     json_str = text[text.find("(") + 1: text.rfind(")")]
#     data = json.loads(json_str)
#
#     if data.get("data") and data["data"].get("diff"):
#         for item in data["data"]["diff"]:
#             code = item.get("f12")
#             name = item.get("f14")
#             print(code, name)
#             all_stocks.append((code, name))
#
#     print(f"第 {page} 页已获取，共 {len(all_stocks)} 条记录")


# Async variant (aiohttp) — commented out, kept for reference
# async  def fetch(session,url):
#     async  with session.get(url,cookies=cookies) as response:
#         text = await response.text()
#         # 去掉 JSONP 包装
#         json_str = text[text.find("(") + 1: text.rfind(")")]
#
#         data = json.loads(json_str)
#
#
#         if data.get("data") and data["data"].get("diff"):
#             for item in data["data"]["diff"]:
#                 code = item.get("f12")
#                 name = item.get("f14")
#                 print(code, name)
#
# async  def main():
#     global page
#     async  with aiohttp.ClientSession() as session:
#         urls = [(f"https://push2.eastmoney.com/api/qt/clist/get?np=1"
#            f"&fltt=1"
#            f"&invt=2"
#            f"&cb=jQuery37109998877634289544_1762236957607"
#            f"&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3"
#            f"&pn={page}"
#            f"&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b"
#            f"&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762236957609") for page in range(1,5)]
#         for i ,url in enumerate(urls):
#             print(f"第 {i+1} 页开始获取")
#
#         result = await asyncio.gather(*[fetch(session,url) for url in urls])
#
# asyncio.run(main())




# Concurrent page fetching — NOTE: despite the original comment ("multiprocessing,
# process pool"), the code below uses a ThreadPoolExecutor (threads, not processes).

from multiprocessing import Pool

# Serializes stdout access across the worker threads in fetch().
print_lock = threading.Lock()

# The endpoint's query string, split around the page-number parameter so
# each page URL is a simple prefix + page + suffix concatenation.
_URL_PREFIX = (
    "https://push2.eastmoney.com/api/qt/clist/get?np=1"
    "&fltt=1"
    "&invt=2"
    "&cb=jQuery37109998877634289544_1762236957607"
    "&fs=m%3A0%2Bt%3A6%2Bf%3A!2%2Cm%3A0%2Bt%3A80%2Bf%3A!2%2Cm%3A1%2Bt%3A2%2Bf%3A!2%2Cm%3A1%2Bt%3A23%2Bf%3A!2%2Cm%3A0%2Bt%3A81%2Bs%3A262144%2Bf%3A!2&fields=f12%2Cf13%2Cf14%2Cf1%2Cf2%2Cf4%2Cf3%2Cf152%2Cf5%2Cf6%2Cf7%2Cf15%2Cf18%2Cf16%2Cf17%2Cf10%2Cf8%2Cf9%2Cf23&fid=f3"
    "&pn="
)
_URL_SUFFIX = (
    "&pz=20&po=1&dect=1&ut=fa5fd1943c7b386f172d6893dbfba10b"
    "&wbp2u=%7C0%7C0%7C0%7Cweb&_=1762236957609"
)

# One URL per page, pages 1..272 inclusive.
urls = [_URL_PREFIX + str(page) + _URL_SUFFIX for page in range(1, 273)]


def fetch(url):
    """Fetch one page of the stock list and print each (code, name) pair.

    Runs inside a worker thread; printing is serialized through the
    module-level ``print_lock`` so lines from different threads don't
    interleave.

    :param url: full JSONP API URL for a single page (see ``urls``).
    :raises requests.RequestException: on network failure or timeout.
    :raises json.JSONDecodeError: if the response is not the expected JSONP.
    """
    # Crude per-request rate limit so ten workers don't hammer the endpoint.
    time.sleep(1)
    # Fix: requests has NO default timeout — without one a stalled
    # connection would hang this worker thread forever.
    response = requests.get(url, headers=headers, cookies=cookies, timeout=10)
    text = response.text
    # Strip the JSONP wrapper: jQuery...( <payload> ) -> <payload>.
    json_str = text[text.find("(") + 1: text.rfind(")")]
    data = json.loads(json_str)
    # f12 = stock code, f14 = stock name (eastmoney field IDs).
    if data.get("data") and data["data"].get("diff"):
        for item in data["data"]["diff"]:
            code = item.get("f12")
            name = item.get("f14")
            with print_lock:
                print(code, name)

# Fan out the page fetches over a THREAD pool (the original comment said
# "multiprocessing", but this is I/O-bound work on threads). The `with`
# block waits for all submitted tasks before exiting.
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
    futures = [executor.submit(fetch, url) for url in urls]
    # Fix: the original never inspected its futures, so any worker
    # exception was silently discarded. Report failures without aborting
    # the remaining pages.
    for future in concurrent.futures.as_completed(futures):
        exc = future.exception()
        if exc is not None:
            print(f"page fetch failed: {exc!r}")











