import time
import requests
import gevent

from asyncio_ import urls 

def crawl(url, timeout=10):
    """Fetch *url* and append the decoded response body to ``a.json``.

    Parameters
    ----------
    url : str
        The URL to fetch.
    timeout : float, optional
        Seconds to wait for the server (default 10).  Without a timeout,
        ``requests.get`` can block forever and permanently stall this
        greenlet.

    Raises
    ------
    requests.RequestException
        On connection errors or timeout.
    """
    text = requests.get(url, timeout=timeout).text
    # ``.text`` is already a decoded str, so write it back as UTF-8
    # explicitly rather than relying on the platform default encoding.
    # NOTE(review): several greenlets append to the same file, so the
    # result is concatenated bodies, not a single valid JSON document —
    # confirm that is the intent.
    with open('a.json', 'a', encoding='utf-8') as f:
        f.write(text)
    

if __name__ == '__main__':
    # Monkey-patch the blocking stdlib I/O primitives so that the socket
    # calls made by requests cooperate with the gevent hub.  Without this,
    # every crawl() blocks the event loop and the spawned greenlets run
    # strictly serially — no concurrency at all.
    # (The textbook placement is the very top of the file, before any
    # other import; moving it there is recommended.)
    from gevent import monkey
    monkey.patch_all()

    start = time.time()

    # Fan out one greenlet per URL and wait for all of them to finish.
    gevent.joinall([
        gevent.spawn(crawl, url) for url in urls
    ])

    # Elapsed wall-clock time for the whole crawl.
    print(time.time() - start)