from multiprocessing import Process, Lock, Pool, Manager, Pipe
from multiprocessing import Value,Array
import os
from queue import Queue
import time

def fPool(x):
    """Return the square of *x* (worker function for the Pool demos)."""
    return x ** 2

def f(pName):
    """Print which process is currently running, identified by *pName*."""
    print(f"the current process is {pName}")

def f1(l, i):
    """Print a greeting while holding the shared lock *l*.

    The lock serializes the print calls across processes so output
    lines do not interleave. ``with`` gives the same acquire/release
    guarantee as the original try/finally.
    """
    with l:
        print('hello world', i)

def f2(n, a):
    """Mutate shared state: set *n* to pi and negate every element of *a*.

    *n* is expected to be a shared Value ('d') and *a* an indexable
    shared Array ('i'); both are modified in place.
    """
    n.value = 3.1415927
    for idx, item in enumerate(a):
        a[idx] = -item

def f3(d, l):
    """Fill shared dict *d* with mixed-type keys and reverse shared list *l*.

    Demonstrates that a Manager dict accepts int, str and float keys;
    both containers are mutated in place.
    """
    d.update({1: '1', '2': 2, 0.25: None})
    l.reverse()

def success():
    """Print the literal marker ``success``."""
    print("success")

def failed():
    """Print the literal marker ``failed``."""
    print("failed")

def fPipe(conn):
    """Send a fixed demo payload through pipe endpoint *conn* and close it."""
    payload = [42, None, 'hello']
    conn.send(payload)
    conn.close()

if __name__ == "__main__":
    # 1. Create a process
    p1 = Process(target=f, args=('p-1',))
    # 2. Start it
    p1.start()
    # 3. Block until it finishes
    p1.join()

    # 4. Inter-process synchronization (locking)
    lock = Lock()
    # Fix: keep references to the 10 workers and join them all, so no
    # child is left running (and printing) into the later demo sections.
    lock_demo = [Process(target=f1, args=(lock, num)) for num in range(10)]
    for proc in lock_demo:
        proc.start()
    for proc in lock_demo:
        proc.join()

    # 5. Sharing state between processes
    # 5.1 Shared memory (Value / Array): fast, but limited to ctypes types.
    a = Value('d', 0.0)
    b = Array('i', range(10))
    p2 = Process(target=f2, args=(a, b))
    p3 = Process(target=f2, args=(a, b))
    p2.start()
    p2.join()
    p3.start()
    p3.join()

    # f2 ran twice, so every element of b is negated twice (back to original).
    print(a.value)
    print(b[:])

    # 5.2 Manager: slower than shared memory, but supports many types:
    # list, dict, Namespace, Lock, RLock, Semaphore, BoundedSemaphore,
    # Condition, Event, Barrier, Queue, Value and Array.
    with Manager() as manager:
        d = manager.dict()
        l = manager.list(range(10))
        p = Process(target=f3, args=(d, l))
        p.start()
        p.join()

        print(d)
        print(l)

    # 6. Communication between processes
    # 6.1 Queue — NOTE(review): the commented demo below is broken as
    # written: it needs multiprocessing.Queue (not queue.Queue, which is
    # thread-only) and a target that actually calls q.put(...).
    # q = Queue()
    # p = Process(target=f, args=(q,))
    # p.start()
    # print(q.get())    # prints "[42, None, 'hello']"
    # p.join()

    # 6.2 Pipe
    parentCon, childCon = Pipe()
    pipe = Process(target=fPipe, args=(childCon,))
    pipe.start()
    print(parentCon.recv())
    pipe.join()

    # 7. Process pool: start 4 worker processes
    with Pool(processes=4) as pool:
        print(pool.map(fPool, range(10)))
        # imap_unordered yields results as soon as any worker finishes.
        for i in pool.imap_unordered(fPool, range(10)):
            print(i)

        res = pool.apply_async(fPool, (20,))
        print(res.get())

        res = pool.apply_async(os.getpid, ())  # runs in *only* one process
        print(res.get(timeout=1))              # prints the PID of that process

        # launching multiple evaluations asynchronously *may* use more processes
        multiple_results = [pool.apply_async(os.getpid, ()) for i in range(4)]
        print([res.get(timeout=1) for res in multiple_results])

        # make a single worker sleep for 10 secs; the result is never
        # collected, so the sleeping task is killed when the Pool context
        # exits and terminates its workers.
        res = pool.apply_async(time.sleep, (10,))
