import requests
from fake_useragent import UserAgent
from bs4 import BeautifulSoup
from loguru import logger

class GetProxyIP:
    """Scrape free proxy IPs from zdaye.com and verify each one against a check URL."""

    # Random User-Agent is chosen once at class-definition time; every
    # request made by this class reuses the same header dict.
    header = {
        "User-Agent": UserAgent().random
    }
    base_url = 'https://www.zdaye.com/free/'
    check_url = "https://www.ip.cn/"

    def get_url_html(self, url):
        """Fetch *url* and return the raw response body as bytes.

        Returns None on a non-200 status or on any request error;
        errors are logged as warnings, never raised to the caller.
        """
        try:
            response = requests.get(
                url=url,
                headers=self.header,
                timeout=5,
            )
            if response.status_code == 200:
                return response.content
            return None
        except Exception as e:
            logger.warning(e)
            # Explicit: fetch failures yield None, same as non-200.
            return None

    def check_ip(self, ip_info):
        """Test whether a proxy is usable.

        ip_info: dict with at least 'ip' and 'port' keys.
        Returns True only if check_url answers 200 *through* the proxy.
        """
        ip_url = ip_info.get("ip") + ":" + str(ip_info.get("port"))
        proxies = {"http": "http://" + ip_url, "https": 'https://' + ip_url}
        res = False
        try:
            response = requests.get(
                url=self.check_url,
                headers=self.header,
                # BUG FIX: proxies was built but never passed, so the
                # original tested the direct connection, not the proxy.
                proxies=proxies,
                timeout=1,
            )
            if response.status_code == 200:
                res = True
        except Exception as e:
            res = False
            logger.warning(e)
        return res

    def run(self):
        """Crawl the listing page(s), parse proxy rows, and log each record."""
        page_list = range(1, 2)
        for page in page_list:
            logger.info('开始爬取第{}页IP数据'.format(page))
            ip_url = self.base_url + str(page)
            html = self.get_url_html(ip_url)
            if html is None:
                # Fetch failed; skip this page instead of crashing
                # BeautifulSoup with a None markup argument.
                continue
            soup = BeautifulSoup(html, 'html.parser')
            # NOTE(review): this selects whole <table> elements, so the td
            # lookup below only sees the first row's cells; the selector may
            # need to be '#list table tr' — confirm against the live page.
            ip_list = soup.select('#list table')
            for ip_tr in ip_list:
                td_list = ip_tr.select('td')
                if len(td_list) < 4:
                    # Malformed or empty row: not enough cells to parse.
                    continue
                ip_address = td_list[0].get_text()
                ip_port = td_list[1].get_text()
                ip_type = td_list[3].get_text()
                info = {'ip': ip_address, 'port': ip_port, 'type': ip_type}
                logger.info(info)


if __name__ == "__main__":
    # Demo entry point: fetch the first free-proxy listing page once.
    GetProxyIP().get_url_html("https://www.zdaye.com/free/")