import redis
import urllib.request as ur
import time

class ProxyPool():
    """Maintain a pool of proxy IPs in a Redis set named 'proxy'.

    ``set_proxy()`` runs forever, refreshing the pool from a proxy-provider
    HTTP API; ``get_proxy()`` hands one random proxy to a crawler.
    """

    def __init__(self):
        # decode_responses=True makes redis-py return str instead of bytes,
        # so members of the 'proxy' set come back ready to use.
        self.redis_conn = redis.StrictRedis(
            host='localhost',
            port=6379,
            decode_responses=True
        )

    def set_proxy(self):
        """Poll the provider API forever and keep the Redis 'proxy' set fresh.

        The API returns proxies as tab-separated text (format=txt, qty=2).
        Whenever the response differs from the previous one, the old set is
        replaced wholesale.  Never returns; run as a standalone maintenance
        process (see ``__main__``).
        """
        url22 = 'http://ip.11jsq.com/index.php/api/entry?method=proxyServer.generate_api_url&packid=1&fa=0&fetch_key=&qty=2&time=1&pro=&city=&port=1&format=txt&ss=4&css=+&dt=1&specialTxt=3&specialJson=&usertype=16'
        proxy_old = None
        while True:
            try:
                # Provider delivers the IPs tab-separated, hence split('\t').
                proxy_new = ur.urlopen(url22).read().decode('utf-8').strip().split('\t')
                if proxy_new != proxy_old:
                    # BUG FIX: was `proxy_old = proxy_old` (a no-op), so the
                    # comparison above never matched and the set was deleted
                    # and rewritten on every single poll.
                    proxy_old = proxy_new
                    self.redis_conn.delete('proxy')
                    self.redis_conn.sadd('proxy', *proxy_new)  # unpack list into the set
                    print('更换代理IP：', proxy_new)
                    time.sleep(2)
                else:
                    time.sleep(1)
            except Exception as e:
                # Network/Redis hiccups: log and keep going, but back off
                # briefly instead of hammering the API in a tight loop.
                print('error=', e)
                time.sleep(1)

    def get_proxy(self):
        """Return one random proxy string from the pool.

        ``set_proxy()`` may momentarily empty the set between delete() and
        sadd(); if the set is empty, wait 0.1 s and retry.  Retrying with a
        loop (rather than the original recursion) avoids blowing the
        recursion limit if the pool stays empty for a long stretch.
        """
        while True:
            proxy_s = self.redis_conn.srandmember('proxy', 1)
            if proxy_s:
                return proxy_s[0]
            time.sleep(0.1)
if __name__ == '__main__':
    # The crawler relies on this process running to keep the pool populated;
    # start the maintenance loop (never returns).
    pool = ProxyPool()
    pool.set_proxy()