import requests,random, redis


def get_url():
    """Pop one URL off the 'urls' Redis list and return it as a string.

    Returns:
        The next URL as ``str``, or ``None`` when the list is empty
        (the original code crashed with AttributeError on ``None.decode()``).
    """
    client = redis.Redis(host='localhost', port=6379, db=0)
    try:
        raw = client.lpop('urls')
        # lpop returns None on an empty list; guard before decoding.
        return raw.decode() if raw is not None else None
    finally:
        # Release the connection even if lpop/decode raises.
        client.close()

def save_unspider_url(url):
    """Push a failed (un-crawled) URL back onto the 'urls' Redis list.

    Args:
        url: The URL string to re-queue for a later retry.
    """
    client = redis.Redis(host='localhost', port=6379, db=0)
    try:
        client.lpush('urls', url)
    finally:
        # Ensure the connection is closed even if lpush fails.
        client.close()

def request_url():
    """Fetch one URL from the Redis queue and print its JSON response.

    On any failure the URL is pushed back onto the queue so it can be
    retried later.
    """
    # Bind before the try: if get_url() itself raises, the original code
    # hit UnboundLocalError on `url` inside the except block.
    url = None
    try:
        url = get_url()
        res = requests.get(url, timeout=1)
        print(res.json())
    except Exception as e:
        print(f'{url}爬取失败， {e}')
        # Only re-queue when we actually obtained a URL.
        if url is not None:
            save_unspider_url(url)


def main():
    """Crawl URLs in a loop, stopping on a random 1-in-5 roll of 3."""
    keep_going = True
    while keep_going:
        request_url()
        # Roll 1-5 after each crawl; a 3 ends the run.
        keep_going = random.randint(1, 5) != 3

# Run the crawler loop only when executed as a script, not on import.
if __name__ == '__main__':
    main()




