# 爬取代理网站的数据，做成动态ip代理池

"""
1、快代理
    https://www.kuaidaili.com/free/inha/1/
2、89代理
    http://www.89ip.cn/index_1.html
"""

import requests
import time
from lxml import html
from base.RedisHandler import RedisHandler


class GetIpHandler:
    """Scrape free-proxy listing sites and maintain a Redis-backed pool of
    working HTTP proxies.

    Currently only kuaidaili is implemented; 89ip is a stub.
    """

    def __init__(self):
        # Target used to verify that a scraped proxy actually works.
        self.baidu_url = "https://www.baidu.com/"
        # %s is the page number of the kuaidaili free-proxy listing.
        self.agent_kuai_url = "https://www.kuaidaili.com/free/inha/%s/"
        self.agent_89_url = "http://www.89ip.cn/index_1.html"
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36"
        }
        self.redis = RedisHandler()

    def check_ip_nums(self):
        """Refill the pool when it drops below 200 stored proxies."""
        num = self.redis.get_ip_num()
        if num < 200:
            self.kuai_handler()

    def kuai_handler(self):
        """Crawl pages 1-30 of kuaidaili, pausing 1s between pages to be
        polite to the site, then report the resulting pool size."""
        for page in range(1, 31):
            self.get_kuai_ips(page)
            time.sleep(1)
        print(f"当前代理数量 {self.redis.get_ip_num()}")

    def get_kuai_ips(self, page):
        """Scrape one kuaidaili listing page and store every proxy that
        passes a live validation check.

        :param page: 1-based page number of the listing to scrape.

        Fixes vs. the original implementation:
        * every request now carries a timeout, so a dead proxy cannot
          hang the crawl indefinitely;
        * the validation request maps BOTH http and https — the check URL
          is https, so the original http-only mapping silently bypassed
          the proxy and marked every candidate "valid";
        * network errors while scraping or validating are caught instead
          of crashing the entire crawl;
        * an empty pool no longer crashes the bootstrap run (no random
          proxy available -> fetch the listing directly).
        """
        # Assumes get_random_ip() returns an (ip, port) pair, or a falsy
        # value when the pool is empty — TODO confirm against RedisHandler.
        random_ip = self.redis.get_random_ip()
        proxies = None
        if random_ip:
            proxies = {
                "http": "http://%s:%s" % (random_ip[0], random_ip[1])
            }
        try:
            response = requests.get(self.agent_kuai_url % str(page),
                                    headers=self.headers, proxies=proxies,
                                    timeout=10)
        except requests.RequestException:
            # Listing page unreachable through the chosen proxy: skip
            # this page rather than abort the whole crawl.
            return
        html_et = html.etree.HTML(response.text)

        ips = html_et.xpath("//tbody/tr/td[@data-title='IP']/text()")
        ports = html_et.xpath("//tbody/tr/td[@data-title='PORT']/text()")
        for k, v in dict(zip(ips, ports)).items():
            candidate = "http://%s:%s" % (k, v)
            # Map both schemes: the validation URL is https, and an
            # http-only mapping would not route it through the proxy.
            check_proxies = {
                "http": candidate,
                "https": candidate,
            }
            try:
                resp_status = requests.get(self.baidu_url,
                                           proxies=check_proxies,
                                           timeout=5).status_code
            except requests.RequestException:
                # Unreachable/broken proxy counts as invalid.
                resp_status = None
            if resp_status == 200:
                print(f"当前代理 {k}: {v} 有效")
                self.redis.add_ip(k, v)
            else:
                print(f"当前代理 {k}: {v} 无效")

    def get_89_ips(self):
        """TODO: scrape 89ip (self.agent_89_url) — not implemented yet."""
        pass


if __name__ == '__main__':
    # Kick off a full crawl of the kuaidaili free-proxy listings.
    GetIpHandler().kuai_handler()
