import logging
import time
import bs4
import requests

# Surface the crawler's per-page progress messages (emitted via logging.info in crawl()).
logging.basicConfig(level=logging.INFO)


class IpPool:
    """Crawl kuaidaili.com's free proxy listing and keep proxies that pass a liveness check.

    Verified proxies accumulate in ``self.ipList`` as requests-style mappings,
    e.g. ``{"http": "1.2.3.4:8080"}``.
    """

    def __init__(self):
        # Page template for kuaidaili's free "inha" listing; {} is the 1-based page number.
        self.__url = "https://www.kuaidaili.com/free/inha/{}/"
        # Proxies that passed testProxy(), ready to pass as requests' `proxies=` argument.
        self.ipList = []
        # A desktop browser UA — the site serves the plain table to ordinary browsers.
        self.__headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:105.0) Gecko/20100101 Firefox/105.0"
        }

    @staticmethod
    def _makeProxy(protocol, ip, port):
        """Build a requests-style proxy mapping from raw table-cell strings.

        The scheme key is lowercased: requests matches ``proxies`` keys against
        the (lowercase) URL scheme, so the site's uppercase "HTTP"/"HTTPS"
        labels would never match and the proxy would silently be bypassed —
        making the liveness test a plain direct request that always succeeds.
        """
        return {protocol.strip().lower(): "{}:{}".format(ip.strip(), port.strip())}

    def getHtml(self, url):
        """Fetch *url* and return its body text.

        Raises requests.HTTPError on a non-2xx response so an error page is
        never handed to the parser as if it were a listing.
        """
        r = requests.get(url, headers=self.__headers, timeout=10)
        r.raise_for_status()
        return r.text

    def addProxy(self, url):
        """Parse one listing page and append every proxy that passes testProxy()."""
        html = self.getHtml(url)
        soup = bs4.BeautifulSoup(html, "lxml")
        for tr in soup.select("tbody tr"):
            tds = tr.find_all("td")
            # Defend against layout changes: missing cells, or cells whose
            # .string is None (nested markup / empty tag) would crash .strip().
            if len(tds) < 4:
                continue
            ip, port, protocol = tds[0].string, tds[1].string, tds[3].string
            if ip is None or port is None or protocol is None:
                continue
            proxy = self._makeProxy(protocol, ip, port)
            if self.testProxy(proxy):
                self.ipList.append(proxy)

    def testProxy(self, proxy):
        """Return True iff a test request routed through *proxy* succeeds within 1s."""
        try:
            r = requests.get("https://httpbin.org/get", headers=self.__headers, proxies=proxy, timeout=1)
            return r.status_code == 200
        except requests.RequestException:
            # Timeout, connection refused, bad proxy, etc. — just reject it.
            return False

    def crawl(self, index):
        """Crawl listing pages 1..*index* inclusive, sleeping 1s between requests to be polite."""
        for i in range(1, index + 1):
            logging.info("Crawling Page %s", i)
            page = self.__url.format(i)
            time.sleep(1)
            self.addProxy(page)


if __name__ == "__main__":
    # Crawl the first three listing pages, then report how many proxies survived the check.
    pool = IpPool()
    pool.crawl(3)
    print("可用代理IP:", len(pool.ipList))
