#! /usr/bin/env python
# -*- coding: utf-8 -*-

# *************************************************************
#  Copyright (c) JoinQuant Development Team
#
#  Author: Huayong Kuang <kuanghuayong@joinquant.com>
#  CreateTime: 2017-11-30 13:13:29 Thursday
# *************************************************************

from __future__ import print_function

import time
from multiprocessing.dummy import Pool as ThreadPool
from pprint import pprint

import logging
logging.basicConfig(
    level=logging.INFO,
    format='[%(levelname)1.1s %(asctime)s] %(message)s',
)

def request(url):
    """Fetch *url* with a 5-second timeout and return its HTTP status code.

    Raises an exception from the ``requests`` library on network failure
    or on a non-success (4xx/5xx) response.
    """
    # Imported lazily so the module can be inspected without requests installed.
    import requests

    response = requests.get(url, timeout=5)
    response.raise_for_status()
    return response.status_code

# Sample pages to fetch — mostly python.org-related links, plus one
# unresolvable host as the final entry (exercises the failure path).
urls = [
    'http://www.python.org',
    'http://www.python.org/about/',
    'http://www.onlamp.com/pub/a/python/2003/04/17/metaclasses.html',
    'http://www.python.org/doc/',
    'http://www.python.org/download/',
    'http://www.python.org/getit/',
    'http://www.python.org/community/',
    'https://wiki.python.org/moin/',
    'http://planet.python.org/',
    'https://wiki.python.org/moin/LocalUserGroups',
    'http://www.python.org/psf/',
    'http://docs.python.org/devguide/',
    'http://www.python.org/community/awards/',
    'http://www.xxxyyy.xxcom',
]

# urls = ["http://konghy.cn"] * 80

# --- Concurrent fetch: map request() over urls with a 4-worker thread pool. ---
time1 = time.time()

# Make the Pool of workers
pool = ThreadPool(4)
try:
    # map() blocks until every URL is fetched; any worker exception is
    # re-raised here. NOTE(review): with the unresolvable last URL this
    # is expected to raise, so the script aborts before the timing
    # comparison completes.
    results = pool.map(request, urls)
except Exception as e:
    print(">" * 50, e)
    raise
finally:
    # Always close the pool and wait for the work to finish — the
    # original leaked the worker threads when map() raised.
    pool.close()
    pool.join()
pprint(results)

time2 = time.time()

# --- Sequential fetch of the same URLs, for timing comparison. ---
print("-" * 100)
for url in urls:
    print(request(url))

time3 = time.time()

# Threaded elapsed time vs sequential elapsed time, in seconds.
print(time2 - time1, time3 - time2)
