if __name__ == '__main__':
    # Spawn one child process per item. Because join() sits inside the
    # loop, each child runs to completion before the next one starts —
    # this demo is sequential, not parallel.
    for item in ('A', 'B', 'C', 'D'):
        p = Process(target=func, args=(item,))
        # p.daemon = True  # daemon process (killed when the parent exits)
        p.start()
        p.join()  # block until this child finishes
        # p.terminate()  # force-kill the process
    # Sample output:
    # PID-1684: A
    # PID-4112: B
    # PID-15832: C
    # PID-5188: D
def func2(args):
    """Demo worker that receives several parameters packed in one sequence.

    Indexing the payload explicitly (rather than ``x, y = args``) makes it
    easier to locate errors when the payload has the wrong shape.
    """
    minuend, subtrahend = args[0], args[1]
    time.sleep(1)  # pretend this is a time-consuming operation
    return minuend - subtrahend
def run__pool():  # main process
    # NOTE(review): Pool is imported but never used in the visible body —
    # the source appears truncated here; confirm against the full file.
    from multiprocessing import Pool

    '''Another way (I don't recommend)
    Using 'functions.partial'. See https://stackoverflow.com/a/25553970/9293137
    from functools import partial
    # from functools import partial
    # pool.map(partial(f, a, b), iterable)
    '''
if __name__ == '__main__':
    run__pool()
#3 ProcessPoolExecutor from concurrent.futures import ProcessPoolExecutor, as_completed import time
def square(n):
    """Return ``n`` squared, after simulating one second of work."""
    time.sleep(1)  # stand-in for a CPU/time-consuming task
    return n * n
if __name__ == '__main__':
    # Fan 10 tasks out over a pool of 3 worker processes. as_completed()
    # yields futures in *completion* order, which is not guaranteed to be
    # submission order.
    with ProcessPoolExecutor(max_workers=3) as executor:
        futures = [executor.submit(square, i) for i in range(10)]
        for fut in as_completed(futures):
            print(fut.result())
    # 0 1 4 9 16 25 36 49 64 81
##1 Queue from multiprocessing import Process, Queue import os
def write(q):
    """Producer: push four items onto the shared queue, logging each put."""
    for item in ('A', 'B', 'C', 'D'):
        print('Process %s put %s to queue.' % (os.getpid(), item))
        q.put(item)


def read(q):
    """Consumer: loop forever, blocking on the queue and echoing each item.

    Never returns; the parent is expected to terminate() this process.
    """
    while True:
        item = q.get(block=True)
        print('Process %s get %s from queue.' % (os.getpid(), item))
if __name__ == '__main__':
    # The parent process creates the Queue and hands it to each child.
    q = Queue(maxsize=4)
    q.qsize()
    pw = Process(target=write, args=(q,))
    pr = Process(target=read, args=(q,))
    pw.start()
    pr.start()
    pw.join()  # wait for the writer to finish
    pr.terminate()  # the reader loops forever, so force-kill it
    # Process 16304 put A to queue.
    # Process 16304 put B to queue.
    # Process 16304 put C to queue.
    # Process 14676 get A from queue.
    # Process 16304 put D to queue.
    # Process 14676 get B from queue.
    # Process 14676 get C from queue.
    # Process 14676 get D from queue.