import time
import scrapy_redis
import redis
import urllib.request as ur
class ipPool():
    """Maintain a pool of proxy IP addresses in a Redis set.

    ``set_proxy`` polls a proxy-vendor HTTP API forever and refreshes the
    Redis set ``'proxy'`` whenever the vendor returns a new address list;
    ``get_proxy`` hands out one random member of that set, waiting briefly
    if the pool is momentarily empty (e.g. mid-refresh).
    """

    def __init__(self):
        # decode_responses=True makes redis-py return str instead of bytes,
        # so get_proxy() yields ready-to-use address strings.
        self.redis_conn = redis.StrictRedis(
            host='localhost',
            port=6379,
            decode_responses=True
        )

    def set_proxy(self):
        """Poll the vendor API in an endless loop, refreshing the pool on change."""
        proxy_old = None
        while True:
            # sep=3 asks the vendor for newline-separated addresses.
            # Use a context manager so the HTTP response is always closed
            # (the original leaked one connection per poll).
            with ur.urlopen('http://api.ip.data5u.com/dynamic/get.html?order=140.250.82.224:27018&sep=3') as resp:
                proxy_new = resp.read().decode('utf-8')
            if proxy_new != proxy_old:
                proxy_old = proxy_new
                # BUG FIX: the original did sadd('proxy', *proxy_new), which
                # unpacks the response STRING character by character, storing
                # one-character set members.  Split into whole addresses first.
                proxies = proxy_new.split()
                if proxies:  # guard: sadd() with zero members raises
                    self.redis_conn.delete('proxy')
                    self.redis_conn.sadd('proxy', *proxies)  # refresh proxy pool
                    print('更换代理ip为:%s' % proxy_new)
                time.sleep(2)
            else:
                time.sleep(1)

    def get_proxy(self):
        """Return one random proxy address, blocking until one is available."""
        # BUG FIX: the original retried via unbounded recursion (could hit the
        # recursion limit if the pool stays empty) and had an unreachable
        # print(ret) after both return paths.  A plain loop is safe.
        while True:
            members = self.redis_conn.srandmember('proxy', 1)
            if members:
                return members[0]
            # Pool momentarily empty (set_proxy is mid-refresh); retry shortly.
            time.sleep(0.1)
if __name__ == '__main__':
    # BUG FIX: the original called ipPool.get_proxy() on the class itself,
    # which raises TypeError (the instance method gets no `self`).
    # Instantiate the pool first.
    ipPool().get_proxy()
