import concurrent.futures
import os

import pymysql
import requests
from lxml import etree

# Browser-like User-Agent so the free-proxy listing sites serve the normal
# HTML page instead of blocking the scraper as a bot.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'}


def mysql_db():
    """Open and return a pymysql connection to the proxy database.

    Connection parameters may be overridden through the DB_HOST, DB_PORT,
    DB_NAME, DB_USER and DB_PASSWORD environment variables; the historic
    hard-coded values remain the defaults so existing deployments keep
    working unchanged.

    SECURITY NOTE(review): database credentials were committed in source.
    They should be rotated and supplied exclusively via the environment
    or a secrets store.
    """
    conn = pymysql.connect(
        host=os.environ.get('DB_HOST', '101.132.157.214'),
        port=int(os.environ.get('DB_PORT', '3306')),
        database=os.environ.get('DB_NAME', 'alarm'),
        charset='utf8',
        user=os.environ.get('DB_USER', 'alarm'),
        password=os.environ.get('DB_PASSWORD', 'Tian201404293')
    )
    return conn


# Proxy source 1: www.66ip.cn
def get_proxies_66ip():
    """Scrape 'http://ip:port' proxy URLs from www.66ip.cn.

    Iterates listing pages 1-9.  Each page is fetched with a timeout and
    best-effort error handling so a single dead page no longer aborts
    the whole scrape (previously there was no timeout and any network
    error raised out of the loop).
    """
    proxies = []
    for page in range(1, 10):  # first 9 listing pages
        url = f'http://www.66ip.cn/{page}.html'
        try:
            response = requests.get(url, headers=headers, timeout=5)
            response.raise_for_status()
        except requests.exceptions.RequestException:
            continue  # best-effort: skip unreachable/failed pages
        html = etree.HTML(response.text)
        if html is None:  # etree.HTML returns None for empty/unparseable input
            continue
        ips = html.xpath('//div[@align="center"]/table/tr[position()>1]/td[1]/text()')
        ports = html.xpath('//div[@align="center"]/table/tr[position()>1]/td[2]/text()')
        proxies += ['http://' + ip + ':' + port for ip, port in zip(ips, ports)]
    return proxies


# Proxy source 2: www.89ip.cn
def get_proxies_89ip():
    """Scrape 'http://ip:port' proxy URLs from www.89ip.cn.

    Iterates listing pages 1-9.  Each page is fetched with a timeout and
    best-effort error handling so one failing page is skipped instead of
    aborting the whole scrape.  Cell text is stripped because this site
    pads the table cells with whitespace.
    """
    proxies = []
    for page in range(1, 10):  # first 9 listing pages
        url = f'https://www.89ip.cn/index_{page}.html'
        try:
            response = requests.get(url, headers=headers, timeout=5)
            response.raise_for_status()
        except requests.exceptions.RequestException:
            continue  # best-effort: skip unreachable/failed pages
        html = etree.HTML(response.text)
        if html is None:  # etree.HTML returns None for empty/unparseable input
            continue
        ips = html.xpath('//table[@class="layui-table"]/tbody/tr/td[1]/text()')
        ports = html.xpath('//table[@class="layui-table"]/tbody/tr/td[2]/text()')
        proxies += ['http://' + ip.strip() + ':' + port.strip() for ip, port in zip(ips, ports)]
    return proxies


# Proxy source 3: proxy.ip3366.net
def get_proxies_ip3366():
    """Scrape 'http://ip:port' proxy URLs from proxy.ip3366.net.

    Iterates listing pages 1-9.  Each page is fetched with a timeout and
    best-effort error handling so one failing page is skipped instead of
    aborting the whole scrape.
    """
    proxies = []
    for page in range(1, 10):  # first 9 listing pages
        url = f'https://proxy.ip3366.net/free/?stype=1&page={page}'
        try:
            response = requests.get(url, headers=headers, timeout=5)
            response.raise_for_status()
        except requests.exceptions.RequestException:
            continue  # best-effort: skip unreachable/failed pages
        html = etree.HTML(response.text)
        if html is None:  # etree.HTML returns None for empty/unparseable input
            continue
        ips = html.xpath('//div[@id="list"]/table/tbody/tr/td[1]/text()')
        ports = html.xpath('//div[@id="list"]/table/tbody/tr/td[2]/text()')
        proxies += ['http://' + ip.strip() + ':' + port.strip() for ip, port in zip(ips, ports)]
    return proxies


# Aggregate every proxy source and drop duplicates.
def get_all_proxies():
    """Return the deduplicated union of proxies from all three sources."""
    collected = set()
    collected.update(get_proxies_66ip())
    collected.update(get_proxies_89ip())
    collected.update(get_proxies_ip3366())
    return list(collected)


# Liveness check for a single proxy.
def verify_proxy(proxy):
    """Return *proxy* if it can fetch httpbin.org within 2s, else None."""
    probe_url = 'http://httpbin.org/ip'
    proxy_map = {'http': proxy, 'https': proxy}
    try:
        resp = requests.get(probe_url, headers=headers, proxies=proxy_map, timeout=2)
    except requests.exceptions.RequestException:
        return None
    return proxy if resp.status_code == 200 else None


# Re-verify stored proxies and prune the dead ones.
def check_and_delete_proxies(conn):
    """Re-check every proxy stored in the `proxies` table and delete
    those that no longer respond.  Commits once at the end; the cursor
    is always closed."""
    cur = conn.cursor()
    try:
        cur.execute("SELECT ip FROM proxies")
        stored = [row[0] for row in cur.fetchall()]
        # Verify concurrently; keep only the proxies that still answer.
        with concurrent.futures.ThreadPoolExecutor() as pool:
            alive = [p for p in pool.map(verify_proxy, stored) if p]
        dead = set(stored) - set(alive)
        print('啦啦啦，数据库部分ip已经失效，即将从数据库删除，invalid proxies=', dead)
        for stale in dead:
            cur.execute("DELETE FROM proxies WHERE ip = %s", (stale,))
        conn.commit()
    finally:
        cur.close()


# Persist verified proxies to the database.
def store_proxies(conn, valid_proxies):
    """Insert *valid_proxies* into the `proxies` table and commit.

    Duplicates are skipped by INSERT IGNORE.  The connection is closed
    afterwards (preserving the original contract — callers must not
    reuse *conn* after this call), and the cursor is now closed as well,
    which the original version never did.
    """
    cur = conn.cursor()
    try:
        if valid_proxies:
            # One batched round trip instead of a statement per proxy.
            cur.executemany(
                "INSERT IGNORE INTO proxies (ip) VALUES (%s)",
                [(proxy,) for proxy in valid_proxies],
            )
        conn.commit()
        print('IP存储数据库成功......')
    finally:
        cur.close()  # bug fix: cursor was previously leaked
        conn.close()


if __name__ == '__main__':
    # Open the database connection.
    conn = mysql_db()

    # Drop stored proxies that no longer respond.
    check_and_delete_proxies(conn)

    # Scrape a fresh, deduplicated candidate list from all sources.
    candidates = get_all_proxies()

    # Verify the candidates concurrently; keep the live ones.
    with concurrent.futures.ThreadPoolExecutor() as pool:
        valid_proxies = [p for p in pool.map(verify_proxy, candidates) if p]

    print('本次爬取的可用代理IP有：', valid_proxies)

    # Persist the working proxies (this call also closes the connection).
    store_proxies(conn, valid_proxies)
