from redis import StrictRedis, ConnectionPool

"""
q = Queue(maxsize=100)
item = {}
q.put_nowait(item)  # 不等待直接放，队列满的时候会报错
q.put(item)  # 放入数据，队列满的时候回等待
q.get_nowait()  # 不等待直接取，队列空的时候会报错
q.get()  # 取出数据，队列为空的时候会等待
q.qsize()  # 获取队列中现存数据的个数 
q.join()  # 队列中维持了一个计数，计数不为0时候让主线程阻塞等待，队列计数为0的时候才会继续往后执行
q.task_done()  # put的时候计数+1，get不会-1，get需要和task_done 一起使用才会-1
"""

import time
import random
import requests
from queue import Queue
from threading import Thread
from FDC_spider.settings import PROXY_URL, USER_AGENTS


class ProxyIp(object):
    """Maintain a small pool of validated proxy IPs in a bounded queue.

    run() starts several daemon producer threads that fetch candidate
    proxies from PROXY_URL, validate each against the target ``url``,
    and push working ones into the queue; one more daemon thread
    periodically clears the pool so stale proxies expire.
    """

    def __init__(self, url):
        # Bounded pool of proxies that recently passed validation.
        self.queue = Queue(maxsize=20)
        # Target URL used to validate candidate proxies.
        self.url = url

    def get_ip(self):
        """Fetch one candidate proxy IP from the provider at PROXY_URL.

        Returns the response body decoded and stripped — the provider is
        presumably returning a bare IP address (TODO: confirm format).
        """
        # Timeout prevents a hung provider response from stalling a
        # producer thread forever (the original call could block indefinitely).
        resp = requests.get(PROXY_URL, timeout=5)
        # Strip trailing newline/whitespace so the proxy URL built in
        # check_ip() is well-formed.
        return resp.content.decode().strip()

    def check_ip(self, proxy_ip):
        """Return True if ``proxy_ip`` can fetch self.url within 1 second."""
        headers = {
            'User-Agent': random.choice(USER_AGENTS),
        }
        proxy_dict = {
            "http": "http://hifo:hifo1996@{}:65500".format(proxy_ip),
            "https": "https://hifo:hifo1996@{}:65500".format(proxy_ip),
        }
        try:
            requests.get(self.url, proxies=proxy_dict, headers=headers, timeout=1)
        except requests.RequestException:
            # Any connection/timeout/HTTP-level error means the proxy is unusable.
            return False
        else:
            return True

    def queue_clear(self, seconds=60):
        """Empty the pool every ``seconds`` seconds so stale proxies expire."""
        while True:
            time.sleep(seconds)
            # Hold the queue's internal lock: clearing the underlying deque
            # without it races with concurrent put()/get() from the
            # producer threads and consumers.
            with self.queue.mutex:
                self.queue.queue.clear()
                # Wake any producers blocked in put(); the queue is empty now.
                self.queue.not_full.notify_all()

    def queue_put(self):
        """Producer loop: fetch, validate, and enqueue proxies while room remains."""
        while True:
            time.sleep(0.01)
            if not self.queue.full():
                proxy_ip = self.get_ip()
                if self.check_ip(proxy_ip):
                    self.queue.put(proxy_ip)

    def queue_get(self):
        """Block until a validated proxy is available and return it."""
        return self.queue.get()

    def run(self, t_num=5):
        """Start ``t_num`` producer threads plus one clearing thread (all daemons)."""
        for _ in range(t_num):
            t1 = Thread(target=self.queue_put, daemon=True)
            t1.start()
        t2 = Thread(target=self.queue_clear, daemon=True)
        t2.start()


if __name__ == '__main__':
    # Smoke test: run the pool and continuously verify the proxies it yields.
    url = 'http://www.gyfc.net.cn/2_proInfo/index.aspx'
    pi = ProxyIp(url)
    pi.run()
    while True:
        time.sleep(0.01)
        proxy_ip = pi.queue_get()
        headers = {
            'User-Agent': random.choice(USER_AGENTS),
        }
        proxy_dict = {
            "http": "http://hifo:hifo1996@{}:65500".format(proxy_ip),
            "https": "https://hifo:hifo1996@{}:65500".format(proxy_ip)
        }
        try:
            # Reuse the bound `url` instead of duplicating the literal.
            requests.get(url, proxies=proxy_dict, headers=headers, timeout=5)
            print(proxy_ip, 'ok', pi.queue.qsize())
        except requests.RequestException:
            # Best-effort check loop: a failed proxy is simply skipped.
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt, making the loop impossible to Ctrl-C.
            pass
