import  requests
import  re
import  json



# urls = [f'https://api.web.360kan.com/v1/rank?cat={page}'for page in range(1, 7)]


# Request headers captured from a real browser session on 360kan.com.
# The rank API checks referer/cookie, so the full header set (including the
# session cookie) is replayed to make the request look like the browser's.
# NOTE(review): the cookie is session-bound and will expire — refresh it if
# requests start failing.
headers = {
    "accept": "*/*",
    "accept-encoding": "gzip, deflate, br, zstd",
    "accept-language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7",
    "cache-control": "no-cache",
    "cookie": "__guid=33727651.3194041714110691300.1762222391739.0059; ___sid=33727651.2246386221110949600.1762222391821.0015; monitor_count=3; __DC_gid=33727651.307210930.1762222391820.1762222450719.4",
    "pragma": "no-cache",
    "referer": "https://360kan.com/rank/general",
    "sec-ch-ua": "\"Chromium\";v=\"142\", \"Google Chrome\";v=\"142\", \"Not_A Brand\";v=\"99\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\"",
    "sec-fetch-dest": "script",
    "sec-fetch-mode": "no-cors",
    "sec-fetch-site": "same-site",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36"
}
# res = requests.get(url,headers=headers)
# # print(res.text)
#
# # 去掉 JSONP 包裹：__jp0(...);
# json_str = re.search(r'__jp0\((.*)\);?', res.text, re.DOTALL).group(1)
#
# # 解析为 JSON
# data = json.loads(json_str)
# print(data)
# # 打印验证
# print(json.dumps(data, ensure_ascii=False, indent=2))



# 异步技术解决（协程）

import  asyncio
import  aiohttp

#
# async def fetch(session, url):
#     async with session.get(url) as response:
#         response = await response.json()
#         print(response)
# async def main():
#     async with aiohttp.ClientSession() as session:
#         urls = [f'https://api.web.360kan.com/v1/rank?cat={page}' for page in range(1, 7)]
#         tasks = [fetch(session, url) for url in urls]
#         await asyncio.gather(*tasks)
# asyncio.run(main())

# async def fetch(session,url):
#     async with session.get(url) as response:
#         return await response.json()
#
# async def main():
#     async with aiohttp.ClientSession() as session:
#         url = 'https://api.web.360kan.com/v1/rank?cat=3'
#         data = await fetch(session,url)
#         print(data)
# asyncio.run(main())


# async def fetch(session,url):
#     async with session.get(url) as response:
#         print(await response.json())
#         return await response.json()
#
# async def main():
#     async with aiohttp.ClientSession() as session:
#         urls = [f'https://api.web.360kan.com/v1/rank?cat={page}'for page in range(7)]
#         data = [fetch(session,url) for url in urls]
#         await asyncio.gather(*data)
#
# asyncio.run(main())













# 多线程
import threading
import time
import requests

def t_main(url):
    """Worker-thread entry: fetch one rank-API URL and print its JSON payload.

    Args:
        url: Full request URL for one category of the 360kan rank API.
    """
    # Small stagger so the demo threads don't all fire at the exact same instant.
    time.sleep(1)
    try:
        # Fix: requests.get without a timeout can block this thread forever;
        # also send the module-level browser headers the API expects.
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        print(f"线程{threading.current_thread().name} 获取数据", response.json())
    except requests.RequestException as exc:
        # Report per-URL failures instead of letting the thread die with a traceback.
        print(f"线程{threading.current_thread().name} 请求失败", exc)
def main():
    """Launch one worker thread per ranking category (cat=1..6) and wait for all."""
    print(f"主线程开始执行")
    # Build every worker up front, one per category URL.
    workers = [
        threading.Thread(
            target=t_main,
            name=f'thread-{u}',
            args=(u,),
        )
        for u in (f'https://api.web.360kan.com/v1/rank?cat={page}' for page in range(1, 7))
    ]
    for w in workers:
        w.start()
    # Block until every worker has finished before announcing completion.
    for w in workers:
        w.join()
    print(f"主线程结束执行了")

if __name__ == '__main__':
    main()