import requests
from bs4 import BeautifulSoup
import time

# Browser-impersonation request headers (captured from an Edge 137 session).
# The referer / sec-* fields make the request look like a normal in-site
# navigation; without them the site is more likely to serve an anti-bot page.
headers = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
    "priority": "u=0, i",
    "referer": "https://www.che168.com/china/a0_0msdgscncgpi1ltocsp10exx0/?pvareaid=102179",
    "sec-ch-ua": "\"Microsoft Edge\";v=\"137\", \"Chromium\";v=\"137\", \"Not/A)Brand\";v=\"24\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\"",
    "sec-fetch-dest": "document",
    "sec-fetch-mode": "navigate",
    "sec-fetch-site": "same-origin",
    "sec-fetch-user": "?1",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36 Edg/137.0.0.0"
}
# Session cookies copied from the same browser session.
# NOTE(review): these are tied to one session/IP and will expire; expect the
# site to start rejecting requests once they do — refresh them periodically.
cookies = {
    "userarea": "0",
    "listuserarea": "0",
    "fvlid": "1750658571078rhFw657EDPnF",
    "sessionid": "302a623c-6905-440a-950c-692b1bd8cf52",
    "che_sessionid": "D5765336-29E0-4054-A571-7CD14D7AD870%7C%7C2025-06-23+14%3A02%3A54.835%7C%7C0",
    "Hm_lvt_d381ec2f88158113b9b76f14c497ed48": "1750658572",
    "HMACCOUNT": "A8EA38DBEA1C6BFA",
    "searchhistory": "0%7C0%7C%E5%85%B0%E5%8D%9A%E5%9F%BA%E5%B0%BC%E5%A4%A7%E7%89%9B%2C0%7C0%7C%E5%85%B0%E5%8D%9A%E5%9F%BA%E5%B0%BC",
    "searchhistoryval": "%E5%85%B0%E5%8D%9A%E5%9F%BA%E5%B0%BC%E5%A4%A7%E7%89%9B",
    "acw_tc": "0a1c68b017506673457153572e7408a6de145d458e35c8b76a9776f8e435c9",
    "sessionip": "106.34.4.53",
    "area": "419999",
    "sessionvisit": "7a0ed53e-6e4e-4cf9-9ee4-17912e9e5fca",
    "sessionvisitInfo": "302a623c-6905-440a-950c-692b1bd8cf52||0",
    "che_sessionvid": "7A5FB680-5ACA-44D7-8D54-7E91200344E9",
    "ahpvno": "22",
    "Hm_lpvt_d381ec2f88158113b9b76f14c497ed48": "1750668183",
    "showNum": "22",
    "ahuuid": "95CF6320-49E5-4574-B8C4-FB057B31E0F8",
    "v_no": "22",
    "visit_info_ad": "D5765336-29E0-4054-A571-7CD14D7AD870||7A5FB680-5ACA-44D7-8D54-7E91200344E9||-1||-1||22",
    "che_ref": "0%7C0%7C0%7C0%7C2025-06-23+16%3A43%3A06.296%7C2025-06-23+14%3A02%3A54.835",
    "sessionuid": "302a623c-6905-440a-950c-692b1bd8cf52"
}

def get_ip():
    """Fetch one fresh proxy endpoint from the KDL (Kuaidaili) API.

    Returns:
        dict: a requests-compatible ``proxies`` mapping with ``'http'`` and
        ``'https'`` keys, each an ``http://user:password@ip:port`` URL.

    Raises:
        requests.RequestException: if the API call fails, times out, or
            returns a non-2xx status.
        KeyError / IndexError: if the API JSON lacks ``data.proxy_list``.
    """
    # NOTE(review): secret_id/signature and the proxy credentials are
    # hard-coded; move them to environment variables or a config file.
    kdl_url = "https://dps.kdlapi.com/api/getdps/?secret_id=o6ykpp1tqja9zj9g4947&signature=ne52s9ngdvy61nrghkzf7kytjk7c3qgk&num=1&format=json&sep=1"
    # BUG FIX: the original call had no timeout and could hang forever;
    # it also ignored HTTP errors, which later surfaced as a confusing
    # KeyError when indexing the JSON.
    resp = requests.get(kdl_url, timeout=10)
    resp.raise_for_status()
    ip = resp.json()["data"]["proxy_list"][0]
    proxies = {
        # format: "http://user:password@ip:port"
        'http': f'http://d2769292027:8bnrqo2g@{ip}',
        'https': f'http://d2769292027:8bnrqo2g@{ip}',
    }
    return proxies


proxies = get_ip()

error_count = 0  # consecutive request failures (reset on success or IP change)
max_errors_before_change = 4  # rotate the proxy IP after this many consecutive errors
count = 0  # running total of car listings printed
max_attempts_per_page = 3  # give up on a page after this many failed attempts

# BUG FIX: the try/except previously wrapped the ENTIRE for-loop, so the
# first exception aborted the whole crawl.  The error counter, the
# IP-rotation threshold, and the "sleep 3s then retry" logic could run at
# most once and never actually retried anything.  The handler now lives
# inside the loop and re-attempts the failing page.
for page in range(1, 101):
    url = f"https://www.che168.com/china/a0_0msdgscncgpi1ltocsp{page}exx0/"
    params = {
        "pvareaid": "102179"
    }

    for attempt in range(1, max_attempts_per_page + 1):
        try:
            response = requests.get(url, headers=headers, cookies=cookies,
                                    params=params, proxies=proxies, timeout=15)
            # Pages are normally UTF-8; fall back to GBK for the occasional
            # legacy-encoded response.
            try:
                html_str = response.content.decode('utf-8')
            except UnicodeDecodeError:
                html_str = response.content.decode('gbk')
            root = BeautifulSoup(html_str, "lxml")
            car_list = root.select("div[class='tp-cards-tofu fn-clear'] a[class='carinfo'] > div[class='cards-bottom']")

            print(f"第{page}页开始****************************")

            for car in car_list:
                count += 1
                car_title = car.select_one("h4[class='card-name']").text
                # "mileage／year" is joined with a full-width slash (U+FF0F).
                cards_unit = car.select_one("p[class='cards-unit']").text.strip().split("／")
                car_kilometers = cards_unit[0]
                car_year = cards_unit[1]
                car_price = car.select_one("div.cards-price-box > span").text

                print(f"{count},{car_title}, {car_kilometers}, {car_year}, 实际价格:{car_price}")
            print(f"第{page}页结束___________________________")

            error_count = 0  # success: reset the consecutive-error counter
            time.sleep(2)  # be polite between pages
            break  # page done, move on to the next one
        except Exception as e:
            error_count += 1
            print(f"\n发生错误 ({error_count}/{max_errors_before_change}): {type(e).__name__}")
            print(f"错误详情: {str(e)}")
            # Rotate the proxy IP once the error threshold is reached.
            if error_count >= max_errors_before_change:
                print("\n⚠️ 错误次数达到阈值，正在更换IP...")
                proxies = get_ip()
                error_count = 0  # reset the counter after switching IPs
                print(f"新IP: {proxies.get('http', '未知')}")
            # Wait 3 seconds, then retry this page (up to max_attempts_per_page).
            time.sleep(3)
