import asyncio
import threading

import aiohttp
import requests


# Browser-like request headers copied from a real Edge/Chromium session so that
# requests to the site look like an ordinary browser visit rather than a script.
headers = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Pragma": "no-cache",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-User": "?1",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36 Edg/142.0.0.0",
    "sec-ch-ua": "\"Chromium\";v=\"142\", \"Microsoft Edge\";v=\"142\", \"Not_A Brand\";v=\"99\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\""
}
# Session cookies captured from the same browser session. Passed alongside the
# headers in the requests-based variant below; presumably needed for the
# 360kan API to accept the request — TODO confirm which (if any) are required.
# NOTE(review): these values are session-specific and will expire; refresh them
# from a live browser session if requests start failing.
cookies = {
    "__guid": "223794332.188323413067424200.1762222444293.414",
    "___sid": "223794332.1976048719642449000.1762222444330.6045",
    "refer_scene": "47003",
    "__DC_monitor_count": "3",
    "monitor_count": "3",
    "__DC_sid": "121874957.2689278775259793000.1762222481614.1628",
    "open-screen-ad:close": "1",
    "__huid": "11qdV1R14pC4vIzMZG4D%2BJUpKZIJfumwzc1mQGFkl0%2FGc%3D",
    "gtHuid": "1",
    "rt-btm-popup-ad:close": "1",
    "__DC_gid": "223794332.323063190.1762222444329.1762222515740.16"
}



# Async variant (asyncio + aiohttp): fetch all rank categories concurrently.
# NOTE(review): unlike the threaded variant below, this one does not pass
# `headers`/`cookies` to the request — confirm whether that is intentional
# before re-enabling.
#
# async def fetch(session,url):
#     async with session.get(url) as response:
#         response =  await response.json()
#
#         print( response)
#
#
#
# async  def main():
#     async with aiohttp.ClientSession() as session:
#         await asyncio.gather(*[fetch(session,f"https://api.web.360kan.com/v1/rank?cat={i}" )for i in range(1,8)])
#
#
# asyncio.run(main())


# Multithreaded variant (threading + requests): one thread per category URL,
# sending the browser headers and cookies defined above.
# def fetch(url):
#     response = requests.get(url,headers=headers,cookies=cookies)
#     data =  response.json()
#     print(data)
#
# def main():
#     threads=[]
#     urls = [f"https://api.web.360kan.com/v1/rank?cat={i}" for i in range(1,8)]
#     for index,url in enumerate(urls):
#         t = threading.Thread(target=fetch,args=(url,))
#         threads.append(t)
#         t.start()
#
#     for t in threads:
#         t.join()
#
# main()













