import requests
import urllib.request
from bs4 import BeautifulSoup
import time
from fake_useragent import UserAgent
import pymysql

# Default request headers for scraping kuaidaili.com; a random Chrome
# User-Agent is drawn from fake_useragent at import time.
HEADER = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Connection': 'keep-alive',
    'Host': 'www.kuaidaili.com',
    'User-Agent': UserAgent().chrome
}
# Free proxy-IP listing pages on kuaidaili.com (two listing categories)
IP_SOURCE_URLS = ['https://www.kuaidaili.com/free/inha/','https://www.kuaidaili.com/free/intr/']
# URL fetched through each candidate proxy to verify it actually works
CHECK_URL = "http://ip.chinaz.com/getip.aspx"

def __get_proxy_list__(page = 1):
    '''Scrape one page of every configured free-proxy site.

    :param page: 1-based page number appended to each source URL.
    :return: list of [ip_type, ip, port] string triples,
             e.g. ['http', '1.2.3.4', '8080'].
    '''
    proxy_list = []
    for base_url in IP_SOURCE_URLS:
        url = base_url + str(page) + '/'
        print('开始爬取[{}]网页代理IP数据列表'.format(url))
        # timeout keeps the crawler from hanging forever on a dead host
        response = requests.get(url, headers=HEADER, timeout=10)
        bs = BeautifulSoup(response.content.decode(errors='ignore'), 'lxml')
        for row in bs.select('#list > table > tbody > tr'):
            cells = row.select('tr > td')
            # skip malformed/short rows instead of raising IndexError
            if len(cells) < 4:
                continue
            ip = cells[0].text.strip()
            port = cells[1].text.strip()
            ip_type = cells[3].text.strip().lower()
            proxy_list.append([ip_type, ip, port])
        # be polite to the source site between requests
        time.sleep(3)
    return proxy_list

def __check_proxy_ip__(ip_type, ip, port):
    '''Verify a proxy by fetching CHECK_URL through it.

    :param ip_type: proxy scheme, e.g. 'http' or 'https'.
    :param ip: proxy host address.
    :param port: proxy port, as a string.
    :return: True if the request through the proxy succeeds, else False.
    '''
    proxy_url = ip_type + '://' + ip + ':' + port
    try:
        proxy_handler = urllib.request.ProxyHandler({ip_type: proxy_url})
        opener = urllib.request.build_opener(proxy_handler)
        # timeout so a dead proxy fails fast instead of blocking the loop
        opener.open(CHECK_URL, timeout=10).read()
        print('代理IP[{}]校验成功'.format(proxy_url))
        return True
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any network/proxy error marks the IP as bad
        print('代理IP[{}]校验失败'.format(proxy_url))
        return False

def __save__(data):
    '''Bulk-insert proxy records into the IP_INFO table.

    :param data: iterable of (ip_type, ip, port) triples.
    '''
    conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', password='123456',
                           database="proxy_ip_pool",
                           charset="utf8")
    try:
        # cursor context manager closes the cursor even on failure
        with conn.cursor() as cursor:
            sql = 'INSERT INTO IP_INFO(IP_TYPE, IP, PORT) VALUES (%s, %s, %s)'
            cursor.executemany(sql, data)
        conn.commit()
    finally:
        # release the connection even if the insert or commit raises
        conn.close()

def __delete_ip_info__():
    '''Remove every row from the IP_INFO table (full pool reset).'''
    conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', password='123456',
                           database="proxy_ip_pool",
                           charset="utf8")
    try:
        # cursor context manager closes the cursor even on failure
        with conn.cursor() as cursor:
            cursor.execute('DELETE FROM IP_INFO')
        conn.commit()
    finally:
        # release the connection even if the delete or commit raises
        conn.close()

class API:
    '''Facade over the proxy-IP pool stored in MySQL.'''

    def __query_list__(self):
        '''Return all stored proxies as tuples of (IP_TYPE, IP, PORT).'''
        conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', password='123456',
                               database="proxy_ip_pool",
                               charset="utf8")
        try:
            # cursor context manager closes the cursor even on failure
            with conn.cursor() as cursor:
                cursor.execute('SELECT IP_TYPE, IP, PORT FROM IP_INFO')
                return cursor.fetchall()
        finally:
            # release the connection even if the query raises
            conn.close()

    def __reload_ip_info__(self):
        '''Rebuild the proxy pool: wipe the table, scrape source pages until
        at least one proxy validates, then persist the working proxies.
        '''
        # drop all existing rows first
        __delete_ip_info__()
        proxy_ip_list = []
        page = 1
        # keep fetching further pages until at least one proxy validates;
        # NOTE(review): this loops forever if no page ever yields a working
        # proxy — consider adding a max-page cap.
        while not proxy_ip_list:
            for ip_type, ip, port in __get_proxy_list__(page):
                if __check_proxy_ip__(ip_type, ip, port):
                    proxy_ip_list.append([ip_type, ip, port])
                    # throttle validation requests between successes
                    time.sleep(3)
            page += 1
        __save__(proxy_ip_list)
