import requests
import mysql.connector.pooling
from lxml import etree

import time
import random
from concurrent.futures import ThreadPoolExecutor

# Shared MySQL connection pool used by both the crawler and GetIp.
# NOTE(review): credentials are hard-coded — consider environment variables.
_db_config = {
    "host": "localhost",
    "port": 3306,
    "user": "root",
    "password": "123456",
    "database": "test",
}
pool = mysql.connector.pooling.MySQLConnectionPool(pool_size=10, **_db_config)


class Crawl_KuaiDaiLi_Ip:
    """Crawl kuaidaili.com's free-proxy listing page by page and store the
    scraped proxies into the `proxy_ip` MySQL table via the module-level pool.
    """

    # Thread pool shared by spider/parse/pipeline tasks.
    __th = None
    # First listing page; later pages are derived from the active page number
    # shown in the pagination widget.
    __start_url = "https://www.kuaidaili.com/free/inha/265"
    # BUG FIX: the original UA began with "ozilla/5.0" (missing "M"),
    # which reads as a malformed User-Agent to the target site.
    __header = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36"
    }

    def __init__(self):
        # max_workers matches the DB connection-pool size (10).
        try:
            self.__th = ThreadPoolExecutor(max_workers=10)
        except Exception as e:
            print(e.args)

    def spider(self, url):
        """Fetch *url* and hand the HTML to `parse` on the thread pool.

        Returns the raw HTML text, or None when the request fails.
        """
        try:
            res = requests.get(url, headers=self.__header)
        except Exception as e:
            print(e.args)
            return None
        # BUG FIX: the original did submit(self.parse(html=...)), which runs
        # parse() synchronously and submits its *result* (a list, not a
        # callable). Pass the callable plus its argument instead.
        self.__th.submit(self.parse, res.text)
        return res.text

    @staticmethod
    def _first(node, expr, cast=None):
        """Return the first match of *expr* under *node* (optionally cast),
        or None when the cell is missing."""
        values = node.xpath(expr)
        if not values:
            return None
        return cast(values[0]) if cast else values[0]

    def parse(self, html):
        """Extract proxy rows from one listing page, schedule their storage,
        and schedule the next page until the "next" button is disabled.

        Returns the list of extracted proxy dicts.
        """
        xpath_html = etree.HTML(html)
        ip_item_list = xpath_html.xpath("//table/tbody/tr")
        # This node is rendered only on the last page (disabled "next" button).
        is_last_page = xpath_html.xpath(
            "//ul/li[@class='v3__pagination-next v3__pagination-disabled']"
        )
        # Robustness: don't IndexError if the pagination widget is missing.
        page_nodes = xpath_html.xpath(
            "//ul/li[@class='v3__pagination-item v3__pagination-item-active']/@data-page"
        )
        page_number = page_nodes[0] if page_nodes else None

        ip_datas = []
        for ip_item in ip_item_list:
            ip_datas.append(
                {
                    "ip": self._first(ip_item, "./td[1]/text()"),
                    "port": self._first(ip_item, "./td[2]/text()"),
                    "type": self._first(ip_item, "./td[4]/text()"),
                    "speed": self._first(ip_item, "./td[6]/text()", float),
                }
            )

        # BUG FIX: pass callables + args to submit() instead of calling inline.
        self.__th.submit(self.pipeline, ip_datas)
        if not is_last_page and page_number is not None:
            time.sleep(random.uniform(0, 0.3))  # polite jitter between pages
            url = f"https://www.kuaidaili.com/free/inha/{int(page_number) + 1}"
            print("继续爬取下一页 ~~ ", url)
            self.__th.submit(self.spider, url)
        else:
            print("爬取完成 ~~ ")
        return ip_datas

    def pipeline(self, data):
        """Insert a batch of proxy dicts into `proxy_ip` in one transaction."""
        # Robustness: the original built "VALUES ;" for an empty batch,
        # which is a SQL syntax error.
        if not data:
            return
        # BUG FIX: acquire the connection *before* try so the finally clause
        # cannot hit an unbound `con` if get_connection() raises.
        con = pool.get_connection()
        try:
            con.start_transaction()
            cursor = con.cursor()
            # SECURITY FIX: the original interpolated scraped values straight
            # into the SQL string (injection-prone). Use a parameterized
            # executemany instead.
            cursor.executemany(
                "INSERT INTO proxy_ip (ip, port, type, speed) VALUES (%s, %s, %s, %s)",
                [(d["ip"], d["port"], d["type"], d["speed"]) for d in data],
            )
            con.commit()
        except Exception as e:
            con.rollback()
            print(e.args)
        finally:
            con.close()

    def run(self):
        """Entry point: start crawling from the configured first page."""
        html = self.spider(self.__start_url)


class GetIp(object):
    """Fetch a random working proxy from the `proxy_ip` table, validating it
    with a live request and pruning entries that no longer respond."""

    def test_connection(self, proxy_ip) -> bool:
        """Return True when *proxy_ip* can proxy a plain HTTP request
        within 1 second."""
        try:
            res = requests.get(
                "http://baidu.com", proxies={"http": proxy_ip}, timeout=1
            )
        except Exception as e:
            print(e.args)
            return False
        # Only 2xx counts as a working proxy.
        return 200 <= res.status_code < 300

    def delete_ip(self, ip):
        """Delete a dead proxy row by its ip address."""
        # BUG FIX: acquire before try so `con` is always bound in finally.
        con = pool.get_connection()
        try:
            con.start_transaction()
            cursor = con.cursor()
            cursor.execute("DELETE FROM proxy_ip WHERE ip=%s", [ip])
            con.commit()
        except Exception as e:
            print(e.args)
        finally:
            # BUG FIX: the original closed self.con (never assigned),
            # raising AttributeError and leaking the pooled connection.
            con.close()

    def get_rand_ip(self):
        """Return a validated random proxy URL ("type://ip:port"),
        or None when the table is empty or the query fails."""
        # BUG FIX: the original read self.con, which is never assigned
        # (AttributeError on first call); use a pooled connection instead.
        con = pool.get_connection()
        try:
            cursor = con.cursor()
            cursor.execute("SELECT ip,port,type FROM proxy_ip ORDER BY RAND() LIMIT 1")
            ip_info = cursor.fetchone()
        except Exception as e:
            print(e.args)
            return None
        finally:
            con.close()

        # Robustness: the table may be empty — fetchone() returns None.
        if ip_info is None:
            return None

        proxy_ip = f"{ip_info[2].lower()}://{ip_info[0]}:{ip_info[1]}"
        if self.test_connection(proxy_ip):
            return proxy_ip
        print("delete: ", ip_info[0])
        self.delete_ip(ip_info[0])
        # BUG FIX: the original discarded the recursive result, so a
        # successful retry still returned None to the caller.
        return self.get_rand_ip()


if __name__ == "__main__":
    # To (re)populate the proxy table, run the crawler first:
    # Crawl_KuaiDaiLi_Ip().run()
    fetcher = GetIp()
    # fetcher.test_connection("http://120.23.3.169:80")
    print(fetcher.get_rand_ip())
