import requests
from lxml import etree
import json

class FreeProxys(object):
    """Scrape free-proxy rows from a kuaidaili.com listing page and append them,
    one JSON object per line, to ./ip_kuaidaili.txt."""

    def __init__(self, start_url):
        # URL of the proxy-list page to scrape.
        self.start_url = start_url
        # Desktop Chrome UA so the site serves the regular HTML page.
        self.headers = {
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36"
        }
        # Most recently parsed row; save_ip() reads this attribute
        # (kept as an instance attribute for backward compatibility).
        self.ip_info = {}

    def get_html(self):
        """Fetch the start URL and return the decoded HTML body.

        Raises requests.HTTPError on a non-2xx response and
        requests.Timeout if the server stalls beyond 10 seconds.
        """
        # Timeout + status check: without them a dead host hangs forever and
        # an error page would be silently fed into the parser.
        respon = requests.get(self.start_url, headers=self.headers, timeout=10)
        respon.raise_for_status()
        return respon.content.decode()

    def parse_html(self, respon):
        """Extract ip / port / type / get_post from each table row and save it."""
        html = etree.HTML(respon)
        selectors = html.xpath("//div[@id='freelist']//table[@class='table table-b table-bordered table-striped']//tbody//tr")
        for item in selectors:
            # xpath() returns a LIST of text nodes; the original stored the raw
            # single-element list (e.g. ['1.2.3.4']).  Take the first text node
            # (stripped), or None when the cell is empty.
            self.ip_info = {
                "ip": self._first_text(item, "./td[1]/text()"),
                "port": self._first_text(item, "./td[2]/text()"),
                "type": self._first_text(item, "./td[4]/text()"),
                "get_post": self._first_text(item, "./td[5]/text()"),
            }
            self.save_ip()

    @staticmethod
    def _first_text(item, xpath_expr):
        """Return the first stripped text node matched by xpath_expr, or None."""
        texts = item.xpath(xpath_expr)
        return texts[0].strip() if texts else None

    def save_ip(self):
        """Append self.ip_info as one JSON line to ./ip_kuaidaili.txt.

        JSON (not str(dict)) so the file is machine-readable — this was the
        intent of the previously commented-out json.dump code.
        """
        with open("./ip_kuaidaili.txt", "a", encoding="utf-8") as f:
            f.write(json.dumps(self.ip_info, ensure_ascii=False) + "\n")


def main():
    """Entry point: scrape one kuaidaili free-proxy listing page and persist its rows."""
    page_url = "https://www.kuaidaili.com/proxylist/2"
    scraper = FreeProxys(page_url)
    page_html = scraper.get_html()
    scraper.parse_html(page_html)


if __name__ == '__main__':
    # Run the scraper only when executed as a script, not when imported.
    main()
