# Example: concurrent downloads with gevent greenlets.
#
# gevent's monkey-patching must run BEFORE any module that imports the
# blocking stdlib primitives (socket, ssl, time, ...). `requests` pulls in
# ssl at import time, so patching after `import requests` triggers a
# MonkeyPatchWarning and can leave the already-imported primitives
# unpatched (defeating the cooperative concurrency below).
from gevent import monkey

# Swap blocking stdlib calls for cooperative, greenlet-aware versions.
monkey.patch_all()

import gevent
import requests

def download(url):
    """Fetch *url* over HTTP and report the length of the response body.

    Returns the number of characters received, or None when the request
    fails (timeout, DNS failure, refused connection, ...). Handling the
    failure here keeps one unreachable URL from killing its greenlet
    with an unhandled-exception traceback.
    """
    try:
        res = requests.get(url, timeout=5)
    except requests.RequestException as exc:
        # The URL list includes hosts that may be unreachable (a private
        # IP, possibly-blocked sites); report and continue instead of
        # letting the greenlet die.
        print('Failed to fetch {}: {}'.format(url, exc))
        return None
    content = res.text
    print('从{}网站，获取到的数据长度是:{}'.format(url, len(content)))
    return len(content)


if __name__ == '__main__':
    urls = ['http://www.163.com',  'http://www.google.com','http://www.baidu.com', 'http://192.168.150.235']
    # Spawn one greenlet per URL; they download concurrently because the
    # monkey-patched socket layer yields to other greenlets on blocking I/O.
    # (Joining each greenlet inside the loop would serialize the downloads.)
    glist = [gevent.spawn(download, url) for url in urls]
    # Block until every greenlet has finished.
    gevent.joinall(glist)
