import requests
from bs4 import BeautifulSoup
from check_ip import check_proxy_ip


# HTTP headers sent with every request to 89ip.cn.
# The browser-like user-agent and the session cookie help bypass the site's
# anti-bot / WAF checks (note the https_waf_cookie / https_ydclearance entries).
# NOTE(review): the cookie values are hardcoded from a past browser session and
# presumably expire — if requests start failing, refresh them from a browser.
headers = {
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
    "cookie": "Hm_lvt_f9e56acddd5155c92b9b5499ff966848=1729747136,1730809063,1731138597; HMACCOUNT=727BA1F6FD1A51FF; https_waf_cookie=0c566412-496e-432bca4240f56b9148ad56cae09add02e8e3; https_ydclearance=3a525554ef948a6ad1ac98e6-55b8-4280-b332-2accc72142d1-1731162670; Hm_lpvt_f9e56acddd5155c92b9b5499ff966848=1731155471"

}

# Scrape the first 6 listing pages of 89ip.cn, extract each proxy's IP and
# port from the results table, and report the ones that pass validation.
for page in range(1, 7):
    url = f"https://www.89ip.cn/index_{page}.html"
    try:
        # timeout= prevents the script from hanging forever on a dead
        # connection; raise_for_status() catches 4xx/5xx error pages that
        # would otherwise be parsed as if they were real listings.
        resp = requests.get(url, headers=headers, timeout=10)
        resp.raise_for_status()
    except requests.RequestException as exc:
        print(f"page {page} request failed: {exc}")
        continue

    bs = BeautifulSoup(resp.text, "lxml")
    # CSS path to each data row of the proxy table:
    #   div.layui-col-md8 > div > div.layui-form > table > tbody > tr
    # with the IP in td:nth-child(1) and the port in td:nth-child(2).
    tr_list = bs.select("div.layui-col-md8 > div > div.layui-form > table > tbody > tr")

    for tr in tr_list:
        ip_td = tr.select_one("td:nth-child(1)")
        port_td = tr.select_one("td:nth-child(2)")
        # Skip malformed rows instead of crashing with AttributeError
        # on `.text` of None when the page layout varies.
        if ip_td is None or port_td is None:
            continue
        ip = ip_td.text.strip()
        port = port_td.text.strip()

        if check_proxy_ip(ip, port):
            print(ip, port, " 有效!")