批量生产数据放入队列,再批量获取结果

code
import os
import time
import multiprocessing
 
 
# Producer: push one timestamped message onto the shared queue.
def input(queue):
    # Message format: "<pid>(put):<asctime>"; NOTE: the name `input`
    # shadows the builtin, kept for compatibility with the caller below.
    queue.put(f'{os.getpid()}(put):{time.asctime()}')
 
 
def outputQ(queue):
    # Consumer: take one message off the queue and report which
    # process consumed it, in the form "<pid>(get):<message>".
    message = queue.get()
    print(str(os.getpid()) + '(get):' + message)
 
 
if __name__ == '__main__':
    # Needed for frozen Windows executables; a no-op elsewhere.
    multiprocessing.freeze_support()
    producers = []  # processes that put data onto the queue
    consumers = []  # processes that take data off the queue
    # Bounded queue: at most 3 undelivered messages buffered at once;
    # extra producers block in put() until consumers drain the queue.
    work_queue = multiprocessing.Queue(3)

    # Launch the producer processes.
    for _ in range(10):
        worker = multiprocessing.Process(target=input, args=(work_queue,))
        worker.start()
        producers.append(worker)

    # Launch the consumer processes.
    for _ in range(10):
        worker = multiprocessing.Process(target=outputQ, args=(work_queue,))
        worker.start()
        consumers.append(worker)

    # Wait for every producer, then every consumer, to finish.
    for proc in producers + consumers:
        proc.join()
outputs
macname@MacdeMacBook-Pro py % python3 cccccc.py
58107(get):58097(put):Tue Mar 24 18:59:19 2020
58108(get):58098(put):Tue Mar 24 18:59:19 2020
58109(get):58099(put):Tue Mar 24 18:59:19 2020
58110(get):58100(put):Tue Mar 24 18:59:19 2020
58111(get):58101(put):Tue Mar 24 18:59:19 2020
58112(get):58102(put):Tue Mar 24 18:59:19 2020
58113(get):58103(put):Tue Mar 24 18:59:19 2020
58114(get):58104(put):Tue Mar 24 18:59:19 2020
58115(get):58105(put):Tue Mar 24 18:59:19 2020
58116(get):58106(put):Tue Mar 24 18:59:19 2020
macname@MacdeMacBook-Pro py %

 

                         

上一篇:Redis踩坑系列(二)Spark批量Load大量数据到Redis,主从同步问题


下一篇:Linux-npm install命令&脚本命令