# -*- coding: utf-8 -*-
"""
@Time: 12/9/2018 2:16 PM
@Author: hejing
@Email: 2010jing@gmail.com
"""

# Code adapted from Liao Xuefeng's blog
from multiprocessing import Pool
import os, time, random
import pymysql
import time, requests
from concurrent.futures import ProcessPoolExecutor


def long_time_task(name):
    """Demo worker: sleep for a random 0-3 second span and report the timing.

    Args:
        name: Task label used only in the progress messages.
    """
    print('Run task %s (%s)...' % (name, os.getpid()))
    started = time.time()
    # Simulate a variable-length workload.
    time.sleep(random.random() * 3)
    elapsed = time.time() - started
    print('Task %s runs %0.2f seconds.' % (name, elapsed))


def data_handler(urls):
    """Insert one chunk of demo rows into the ``testsql`` table.

    Args:
        urls: A 2-tuple ``(start, end)`` of ids to insert. The chunks built
            in ``run`` — (1, 1000000), (1000001, 2000000), … — are clearly
            meant as *inclusive* ranges, so ``end`` itself is inserted.
    """
    start, end = urls[0], urls[1]
    print(start)
    print(end)
    # NOTE(review): credentials are hard-coded in source; move them to
    # configuration / environment variables before real use.
    conn = pymysql.connect(host='120.79.254.19', user='root',
                           password='zsdx2016', database='hj',
                           charset='utf8')
    # Parameterized statement: pymysql substitutes the %s placeholders
    # safely, so values are never spliced into the SQL string by hand.
    # Hoisted out of the loop — it is identical on every iteration.
    sql = 'insert into testsql(name,email,password) values(%s,concat(%s,"demo","@uic.edu.hk"),%s);'
    try:
        with conn.cursor() as cursor:
            # Bug fix: range() excludes its upper bound, so the original
            # loop silently skipped `end` (1000000, 2000000, ...); +1
            # makes the chunk inclusive as the caller intends.
            for i in range(start, end + 1):
                cursor.execute(sql, [i, "root", i])
        # One commit per chunk instead of one per row: same data, far
        # fewer server round-trips over a ~1M-row loop.
        conn.commit()
    finally:
        # Always release the connection, even if an insert raises.
        conn.close()


def run():
    """Fan the inclusive id chunks out over a pool of worker processes."""
    # Inclusive (start, end) id ranges, one chunk per worker task.
    urls = [(1, 1000000), (1000001, 2000000), (2000001, 3000000),
            (3000001, 4000000), (4000001, 5000000), (5000001, 6000000),
            (6000001, 7000000), (7000001, 8000000), (8000001, 9000000),
            (9000001, 10000000)]
    with ProcessPoolExecutor() as executor:
        # ProcessPoolExecutor.map accepts any iterable of argument tuples
        # and yields results lazily. Bug fix: the original never iterated
        # the returned generator, so an exception raised inside a worker
        # was stored on its future and silently discarded; draining the
        # iterator re-raises worker failures here.
        for _ in executor.map(data_handler, urls):
            pass

if __name__ == '__main__':
    # Demo: dispatch 5 tasks onto a 4-worker pool, so one task queues
    # behind the others.
    print('Parent process %s.' % os.getpid())
    pool = Pool(4)
    for task_id in range(5):
        pool.apply_async(long_time_task, args=(task_id,))
    print('Waiting for all subprocesses done...')
    # close() (not terminate) lets queued tasks finish; join() blocks
    # until every worker has drained.
    pool.close()
    pool.join()
    print('All subprocesses done.')
