import requests
from pymysql import connect
from lxml import etree


class ProxyIpSpider:
    """Crawl free proxies from xicidaili.com into MySQL and hand out
    random, liveness-checked proxies as `requests`-style proxy dicts.

    Side effects: opens a persistent MySQL connection on construction and
    reads/writes the `proxy_ip` table (columns assumed: id, addr, port, type
    -- TODO confirm against schema).
    """

    def __init__(self):
        # Desktop-Chrome UA so the listing site serves the regular HTML page.
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"
        }
        self.conn = connect(host='localhost', port=3306, user='root', password='root',
                            database='boss_spider', charset='utf8')
        self.cursor = self.conn.cursor()
        self.domain = 'http://www.xicidaili.com'

    # Crawl proxy IPs from every listing page and store them in the DB.
    def crawl_ips(self):
        """Walk the paginated proxy listing, inserting (addr, port, type) rows."""
        next_url = 'http://www.xicidaili.com/wn/'
        while next_url is not None:
            html_str = requests.get(next_url, headers=self.headers,
                                    timeout=10).content.decode()
            element = etree.HTML(html_str)
            # Skip the header <tr> of the listing table.
            ip_tr_list = element.xpath('//table[@id="ip_list"]/tr')[1:]
            rows = []
            for tr in ip_tr_list:
                ip_addr = tr.xpath('./td[2]/text()')[0]
                ip_port = tr.xpath('./td[3]/text()')[0]
                ip_type = 'https' if tr.xpath('./td[6]/text()')[0] == 'HTTPS' else 'http'
                rows.append((ip_addr, ip_port, ip_type))
            if rows:
                # Parameterized bulk insert: no SQL injection from scraped
                # text, and no malformed statement when a page has no rows
                # (the old string-built INSERT broke on an empty page).
                self.cursor.executemany(
                    'insert into proxy_ip (addr,port,type) values (%s,%s,%s)',
                    rows)
                self.conn.commit()
            # Follow the "next page" link if present; stop otherwise.
            next_url = element.xpath('//a[text()="下一页 ›"]/@href')
            print(next_url)
            next_url = self.domain + next_url[0] if len(next_url) > 0 else None

    # Delete an invalid IP from the database.
    def delete_ip(self, ip_addr):
        """Remove every row matching *ip_addr* (parameterized, injection-safe)."""
        self.cursor.execute("delete from proxy_ip where addr=%s", (ip_addr,))
        self.conn.commit()

    # Check whether a proxy is usable.
    def judge_ip(self, ip_addr, ip_port, ip_type):
        """Probe the proxy with a real request; delete it from the DB on failure.

        Returns True when the proxied request comes back with a 2xx status,
        False otherwise (the dead proxy is removed as a side effect).
        """
        proxy_url = '{}://{}:{}'.format(ip_type, ip_addr, ip_port)
        if ip_type == 'http':
            proxy_dict = {'http': proxy_url}
            http_url = 'http://www.baidu.com/'
        else:
            proxy_dict = {'https': proxy_url}
            http_url = 'https://www.baidu.com/'
        try:
            # timeout is essential here: a dead proxy would otherwise hang
            # the whole pipeline indefinitely.
            response = requests.get(http_url, proxies=proxy_dict, timeout=5)
        except Exception:
            print("无效IP")
            self.delete_ip(ip_addr)
            return False
        if 200 <= response.status_code < 300:
            print("有效IP")
            return True
        print("无效IP")
        self.delete_ip(ip_addr)
        return False

    # Fetch a random usable IP of the given scheme from the database.
    def get_random_ip(self, type='http'):
        """Return a `requests` proxies dict for a random working proxy.

        Keeps drawing random candidates of the requested scheme (dead ones
        are deleted by judge_ip) and returns None once the table runs out.
        Parameter is named `type` (shadows the builtin) to stay
        keyword-compatible with existing callers.
        """
        random_sql = "SELECT * FROM proxy_ip WHERE type=%s ORDER BY RAND() LIMIT 1"
        # Iterative retry instead of the previous unbounded recursion, which
        # could blow the stack on a table full of dead proxies.
        while True:
            self.cursor.execute(random_sql, (type,))
            row = self.cursor.fetchone()
            if row is None:
                return None  # no candidates left
            ip_addr, ip_port, ip_type = row[1], row[2], row[3]
            if self.judge_ip(ip_addr, ip_port, ip_type):
                proxy_url = '{}://{}:{}'.format(ip_type, ip_addr, ip_port)
                if ip_type == 'http':
                    return {'http': proxy_url}
                return {'https': proxy_url}


if __name__ == "__main__":
    # Grab one validated HTTPS proxy and show it.
    spider = ProxyIpSpider()
    result = spider.get_random_ip(type='https')
    print(result)
    # Sample output:
    # {'https':'https://113.240.226.164:8080'}
    # {'https':'https://119.188.162.165:8081'}


