import requests
import sqlite3
import pandas as pd

# SQLite database that collects the scraped room listings (table 'dyu' below).
conn = sqlite3.connect('dyu.db')
# Browser-like request headers copied from a real Chrome session so the Douyu
# API treats these requests as coming from the site's own frontend.
headers = {
    "accept": "application/json, text/plain, */*",
    "accept-language": "zh-CN,zh;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "priority": "u=1, i",
    "referer": "https://www.douyu.com/g_LOL",
    "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\"",
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-origin",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
    "x-requested-with": "XMLHttpRequest"
}
# Session cookies captured from the same browser session.
# NOTE(review): these are hard-coded and will expire; the scraper will likely
# start failing once the session is invalidated — confirm whether the endpoint
# actually requires them, or refresh them periodically.
cookies = {
    "dy_did": "3d684a2c5532f0888ab589c000071701",
    "Hm_lvt_e99aee90ec1b2106afe7ec3b199020a7": "1728717602",
    "HMACCOUNT": "DA4C4B31E32015C2",
    "_ga": "GA1.1.2138992121.1728717602",
    "acf_ssid": "1729705513344110070",
    "acf_web_id": "7424785564609591055",
    "acf_did": "3d684a2c5532f0888ab589c000071701",
    "acf_ab_pmt": "20100212%23webnewhome%23B%2C20100254%23WebTool0703%23new%2C1479%23cover_select_web%23B%2C20100249%23webTagRank%23B%2C20100248%23webTagHover%23B%2C1657%23leftBar%23a",
    "acf_ab_ver_all": "20100212%2C20100254%2C1479%2C20100249%2C20100248%2C1657",
    "acf_ab_vs": "webnewhome%3DB%2CWebTool0703%3Dnew%2Ccover_select_web%3DB%2CwebTagRank%3DB%2CwebTagHover%3DB%2CleftBar%3Da",
    "_clck": "12xlsv7%7C2%7Cfpy%7C0%7C1746",
    "_clsk": "jomfm5%7C1728718423613%7C3%7C0%7Cx.clarity.ms%2Fcollect",
    "Hm_lpvt_e99aee90ec1b2106afe7ec3b199020a7": "1728718428",
    "_ga_5JKQ7DTEXC": "GS1.1.1728717602.1.1.1728718428.54.0.520407253"
}
# Room-listing endpoint template: first {} is the category tag id (prefixed
# with "2_"), second {} is the 1-based page number.
url = "https://www.douyu.com/gapi/rknc/directory/mixListV1/2_{}/{}"
# Extra query string sent with every listing request.
params = {
    "readList": "9063322"
}


def get_cateId():
    """Fetch the tag ids of every sub-category in Douyu's second top-level
    category group (the LOL/game group used by the scraper below).

    Returns:
        list: ``tagId`` values, one per sub-category entry.

    Raises:
        requests.HTTPError: if the category endpoint returns an error status.
    """
    # Local name must not shadow the module-level listing-URL template `url`.
    cate_url = 'https://www.douyu.com/japi/weblist/apinc/header/cate'
    # timeout so a stalled connection cannot hang the whole script.
    resp = requests.get(cate_url, timeout=10)
    resp.raise_for_status()  # fail loudly on 4xx/5xx instead of a KeyError later
    # cateList[1] is the category group this scraper targets; each entry in
    # its 'list' carries the tagId used to build listing URLs.
    return [item['tagId'] for item in resp.json()['data']['cateList'][1]['list']]

# Crawl the first 3 pages of every sub-category and append the rooms
# (title / streamer name / viewer heat) to the 'dyu' table in SQLite.
for cate in get_cateId():
    print('cate :' + str(cate))
    for page in range(1, 4):
        print(f'第{page}页数据:')
        response = requests.get(
            url.format(cate, page),
            headers=headers,
            cookies=cookies,
            params=params,
            timeout=10,  # don't hang forever on a stalled request
        )
        response.raise_for_status()  # surface HTTP errors before parsing JSON
        rooms = response.json()['data']['rl']
        # rn = room title, nn = streamer nickname, ol = online/heat count.
        pd.DataFrame({
            'title': [room['rn'] for room in rooms],
            'name': [room['nn'] for room in rooms],
            'heat': [room['ol'] for room in rooms],
        }).to_sql('dyu', conn, if_exists='append', index=False)

# Release the SQLite handle once every page has been stored.
conn.close()