# SuperFastPython.com
# example of parallel map() with the process pool with a larger iterable
from random import random
from time import sleep
from multiprocessing.pool import Pool
 
# task executed in a worker process
# task executed in a worker process
def task(identifier):
    """Simulate one unit of work in a worker process.

    Draws a random float, reports it (flushed so output appears promptly
    across processes), blocks for one second to mimic real work, and
    returns the drawn value.
    """
    # draw the value this task will report and return
    drawn = random()
    # flush=True so the message is not lost in the child's stdout buffer
    print(f'Task {identifier} executing with {drawn}', flush=True)
    # simulate a second of effort
    sleep(1)
    return drawn
 
def main():
    """Run 40 tasks across a pool of four worker processes."""
    # the with-statement closes the pool (and joins workers) on exit
    with Pool(4) as pool:
        # map blocks until every task has completed;
        # chunksize=1 hands tasks to workers one at a time
        pool.map(task, range(40), chunksize=1)


# protect the entry point so child processes can import this module safely
if __name__ == '__main__':
    main()