import asyncio
import random
import redis
import aiohttp

def get_request_urls():
    """Return every URL stored in the Redis "request_urls" list as str.

    Reads the whole list (index 0 through -1) from a fresh connection to
    localhost:6379/db0 and decodes the raw bytes with UTF-8.

    The connection is closed in a ``finally`` block so it is released
    even when the read raises (the original leaked the client on error).
    """
    client = redis.Redis(host='localhost', port=6379, db=0)
    try:
        urls = client.lrange("request_urls", 0, -1)
    finally:
        client.close()
    return [url.decode('utf-8') for url in urls]


def request_urls_record(url):
    """Record *url* at the head of the Redis "request_urls" list.

    Opens a fresh connection to localhost:6379/db0 and pushes the URL
    with LPUSH (most recent first).  ``finally`` guarantees the client
    is closed even if the push fails (the original leaked it on error).
    """
    client = redis.Redis(host='localhost', port=6379, db=0)
    try:
        client.lpush("request_urls", url)
    finally:
        client.close()

async def fetch(session, url):
    """GET *url* and print its JSON body, unless the URL was crawled before.

    Deduplication: the full URL history is read from Redis; if *url* is
    already present the request is skipped and a notice is printed.
    Otherwise the URL is recorded first, then fetched.

    NOTE(review): get_request_urls()/request_urls_record() are blocking
    Redis calls executed directly inside a coroutine, so they stall the
    event loop while they run.  However, because there is no ``await``
    between the membership check and the record, the check-then-record
    pair is effectively atomic with respect to the other tasks on this
    single-threaded loop; pushing these calls onto a thread would
    reintroduce a duplicate-fetch race.  An atomic Redis operation
    (e.g. SADD and its return value) would be needed to make this
    properly async-safe — confirm before changing.
    """
    now_urls = get_request_urls()
    if url in now_urls:
        # Already requested earlier — skip. ("重复爬取" = "duplicate crawl")
        print("重复爬取")
    else:
        # Record BEFORE awaiting the request, so concurrent tasks see it.
        request_urls_record(url)
        async with session.get(url) as response:
            print(response.url)
            response_json = await response.json()
            print(response_json)

async def main():
    """Kick off 100 concurrent fetches of httpbin with a random age query.

    Builds the URL list up front, then runs one fetch() task per URL
    inside a single shared aiohttp session.
    """
    async with aiohttp.ClientSession() as session:
        pending = []
        for _ in range(100):
            target = f"https://httpbin.org/get?age={random.randint(1,20)}"
            pending.append(fetch(session, target))
        await asyncio.gather(*pending)


if __name__ == '__main__':
    asyncio.run(main())