import time

import requests
from fake_useragent import UserAgent
from lxml import etree
# Credentials for the authenticated HTTP proxy endpoint.
username = "d2356162166"
password = "l7zee1jn"
IP = "117.90.218.85:23615"

# Route both plain-HTTP and HTTPS traffic through the same proxy URL.
_proxy_url = f"http://{username}:{password}@{IP}"
proxies = {scheme: _proxy_url for scheme in ("http", "https")}

# Crawl the Beijing Courts enforcement-information listing (zxxx), one
# results page per request, appending every table row to data.txt and
# keeping the most recently fetched page in data.html for debugging.
url = "https://www.bjcourt.gov.cn/zxxx/indexOld.htm"

# Session cookies captured from a browser visit. NOTE(review): these look
# like anti-crawler session tokens (wzws_sessionid) and will expire —
# refresh them if the site starts returning challenge pages.
cookies = {
    "wzws_sessionid": "gDEyMy4xNjAuMjI3LjI1NIE3Njc1ZDWCZTQ4MGM0oGc7QA4=",
    "thguid-r": "1694124181681074176",
    "vh2PJqrvyx": "MDAwM2IyYWYxZTQwMDAwMDAwMDQwaA8QWywxNzMyMDE2NzMy",
    "6JDgKK8lEy": "MDAwM2IyYWYxZTQwMDAwMDAwMDMwG2IkfGAxNzMyMDE2NzMy",
    "JSESSIONID": "node019zptc8kxuy8r1git79vd4n9y43822126.node0"
}

for page in range(578):
    # Rotate the User-Agent on every request to look less like a bot.
    ua = UserAgent().random
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Referer": "https://www.bjcourt.gov.cn/zxxx/indexOld.htm?st=1&zxxxlx=100013007&bzxrlx=&bzxrxm=&zrr=&frhqtzz=&jbfyId=&ah=&dqxh=22&page=2",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-User": "?1",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": ua,
        "sec-ch-ua": "\"Chromium\";v=\"130\", \"Google Chrome\";v=\"130\", \"Not?A_Brand\";v=\"99\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\""
    }
    params = {
        "st": "1",
        "zxxxlx": "100013007",
        "bzxrlx": "",
        "bzxrxm": "",
        "zrr": "",
        "frhqtzz": "",
        "jbfyId": "",
        "ah": "",
        "dqxh": "22",
        "page": page
    }

    try:
        # BUG FIX: the configured `proxies` dict was never passed, so every
        # request went out over the local IP. Also add a timeout so one hung
        # connection cannot stall the entire 578-page crawl.
        response = requests.get(
            url,
            headers=headers,
            cookies=cookies,
            params=params,
            proxies=proxies,
            timeout=30,
        )
        response.raise_for_status()
    except requests.RequestException as exc:
        # Skip this page on network/HTTP errors instead of crashing the run.
        print(f"page {page} failed: {exc}")
        continue

    res = response.content.decode("utf-8")
    # Overwrite data.html each iteration: a snapshot of the latest page,
    # useful when the XPath below stops matching.
    with open("data.html", "w", encoding="utf-8") as f:
        f.write(res)

    result = etree.HTML(res)
    # Skip the first <tr>, which is the table's header row.
    information_list = result.xpath(
        "//div[@class='grid_layout_1000']/table[@class='table_list_02']//tr"
    )[1:]
    print(information_list)

    # Open the output file once per page rather than once per row.
    with open("data.txt", "a", encoding="utf-8") as f:
        for information in information_list:
            # Both xpath() calls return lists of text nodes; the raw list
            # repr is written out, matching the original output format.
            information_id = information.xpath('./td[1]/text()')
            information_name = information.xpath('./td[@class="td_even"]/text()')
            print(information_id, information_name)
            f.write(str(information_id) + str(information_name) + "\n")

    # Polite delay between pages; `time` was imported but never used before.
    time.sleep(1)
