# encoding: utf-8
"""
@author: 夏洛
@QQ: 1972386194
@file: 06-进程池.py
"""





from multiprocessing import Pool
import requests
def scrape(url, timeout=10):
    """Fetch *url* once and print whether the request succeeded.

    Args:
        url: The URL to request.
        timeout: Seconds to wait for the server before giving up.
            Defaults to 10 so callers (e.g. ``pool.map``) need no change.
    """
    try:
        # A timeout is essential in a pool worker: without one a single
        # unresponsive host would block the worker process indefinitely.
        requests.get(url, timeout=timeout)
        print(f'URL {url} Scraped')
    except requests.RequestException:
        # RequestException covers ConnectionError (the original case) and
        # also Timeout/TooManyRedirects, which previously escaped and
        # crashed the worker process.
        print(f'URL {url} not Scraped')

if __name__ == '__main__':
    urls = [
        'https://www.baidu.com',
        'http://www.meituan.com/',
        'http://blog.csdn.net/',
        'http://xxxyxxx.net'
    ]
    # Use the pool as a context manager so it is always torn down
    # (terminated) even if map() raises mid-run.
    with Pool(processes=3) as pool:
        # map() blocks until every URL has been handled by a worker.
        pool.map(scrape, urls)
        pool.close()
        # The original called close() but never join(); join() waits for
        # the worker processes to exit cleanly before the script ends.
        pool.join()