# proxy manager
import pymysql
from scrapy.conf import  settings
import requests
import threading
from queue import Queue

# Fetch all proxy rows from the database into the queue
def get_all_proxy(proxy_q, cursor):
    """Load every proxy row from the py09_ip table into *proxy_q*.

    Args:
        proxy_q: queue.Queue to fill; each item is one full DB row tuple,
            e.g. (id, ip, port).
        cursor: an open DB cursor used for the SELECT.
    """
    sql = 'select * from py09_ip'
    cursor.execute(sql)  # row count return value is not needed here
    # Rows look like (0, '192.168.1.1', 6666)
    for proxy in cursor.fetchall():
        proxy_q.put(proxy)

class ProxyManager(threading.Thread):
    """Worker thread that validates proxies from a queue and deletes dead ones.

    Each worker pulls (id, ip, port) rows from *proxy_q*, probes Baidu
    through the proxy, and removes failing proxies from the database.
    """

    def __init__(self, proxy_q, cursor, conn, lock):
        # Initialise the Thread machinery before our own state.
        super(ProxyManager, self).__init__()
        self.cursor = cursor    # shared DB cursor; writes guarded by self.lock
        self.proxy_q = proxy_q  # queue of proxy rows to validate
        self.conn = conn        # shared DB connection, used for commit
        self.lock = lock        # serialises DB writes across worker threads

    def run(self):
        self.filter_proxy()

    # Validate proxies until the queue is empty
    def filter_proxy(self):
        """Probe each queued proxy; drop any that fails or hides the real IP."""
        url = 'https://www.baidu.com/s?wd=ip'  # page that echoes the client IP
        while not self.proxy_q.empty():
            pro = self.proxy_q.get()
            proxy = {
                'http': 'http://%s:%s' % (pro[1], pro[2]),
                'https': 'http://%s:%s' % (pro[1], pro[2]),
            }
            try:
                response = requests.get(url, proxies=proxy, timeout=12)
                # 2xx only: the original `<= 300` wrongly accepted status 300.
                if 200 <= response.status_code < 300 and '本机IP' in response.text:
                    print(pro[1], '正常')
                else:
                    self._drop_locked(pro)
            except Exception as e:
                # Timeouts / connection errors mean the proxy is dead.
                print(e)
                self._drop_locked(pro)

    def _drop_locked(self, pro):
        # Cursor and connection are shared between threads; serialise deletes.
        with self.lock:
            self.drop_proxy(pro)

    def drop_proxy(self, pro):
        """Delete proxy row *pro* (matched by id) from the database and commit."""
        # NOTE(review): deletes from py09_proxy but the SELECT reads py09_ip —
        # confirm which table name is correct.
        # Parameterized query instead of %-formatting (avoids SQL injection).
        sql = 'delete from py09_proxy where id=%s'
        try:
            self.cursor.execute(sql, (pro[0],))
            self.conn.commit()
            print(pro[1], '被删除了')
        except Exception as e:
            print(e)


def main():
    """Load proxy rows from MySQL and validate them with worker threads."""
    # NOTE(review): credentials are hard-coded even though settings['MYSQL']
    # exists — consider reading host/user/password from settings instead.
    conn = pymysql.connect(host='127.0.0.1', user='root', password='123456',
                           database='temp', charset='utf8')
    cursor = conn.cursor()
    try:
        lock = threading.Lock()

        proxy_q = Queue()               # queue of proxy rows to validate
        get_all_proxy(proxy_q, cursor)  # fill the queue from the database

        # Start workers sharing the queue, cursor, connection and lock.
        workers = [ProxyManager(proxy_q, cursor, conn, lock) for _ in range(2)]
        for t in workers:
            t.start()
        # Wait for all workers to finish (Thread.join returns None, so the
        # original list-comprehension rebinding of t_list was pointless).
        for t in workers:
            t.join()
    finally:
        # Guarantee cleanup even if a worker setup step raises.
        cursor.close()
        conn.close()

# Run the proxy check only when executed as a script (not on import).
if __name__ == '__main__':
    main()

