Python queue (Queue)

# Python 2: the module is called "Queue" (renamed to "queue" in Python 3).
from Queue import Queue
import threading

qlist = Queue(maxsize=300)

# Fill the queue with 20 work items.
for i in range(20):
    qlist.put("hello num%d" % i)

def process_work(qlist):
    # Each worker keeps pulling items until the process exits.
    while True:
        val = qlist.get()
        print val
        # Mark the item as done; qlist.join() at the bottom returns
        # once every put() has a matching task_done().
        qlist.task_done()

# Two daemon workers are enough to drain the queue concurrently.
threads = []
for i in range(2):
    task = threading.Thread(target=process_work, args=(qlist,))
    threads.append(task)

for w in threads:
    w.setDaemon(True)  # daemon threads do not block interpreter exit
    w.start()

# Block until every queued item has been processed.
qlist.join()
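
Roughly the same pattern under Python 3, where the standard-library module was renamed to the lowercase queue and threads expose a daemon attribute instead of setDaemon() (a minimal sketch, not a drop-in replacement for the Python 2 code above):

import queue
import threading

qlist = queue.Queue(maxsize=300)

for i in range(20):
    qlist.put("hello num%d" % i)

def process_work(qlist):
    while True:
        val = qlist.get()
        print(val)
        qlist.task_done()

for _ in range(2):
    t = threading.Thread(target=process_work, args=(qlist,))
    t.daemon = True   # Python 3 spelling of setDaemon(True)
    t.start()

qlist.join()   # returns once every item has been marked done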


# def worker():
#     while True:
#         item = q.get()
#         do_work(item)
#         q.task_done()

# q = Queue()
# for i in range(num_worker_threads):
#      t = Thread(target=worker)
#      t.daemon = True
#      t.start()

# for item in source():
#     q.put(item)

# q.join()
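
Besides the blocking get()/put() used above, Queue also provides put_nowait()/get_nowait(), which raise the module's Full and Empty exceptions instead of waiting. A small Python 2 sketch:

from Queue import Queue, Empty, Full

q = Queue(maxsize=2)
q.put_nowait("a")
q.put_nowait("b")

try:
    q.put_nowait("c")       # the queue already holds maxsize items
except Full:
    print "queue is full, item dropped"

while True:
    try:
        print q.get_nowait()
    except Empty:
        print "queue drained"
        break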

#print "hello {} your age {}".format("zhangsan",18)

# import Queue
# import threading

# class Job(object):
#     def __init__(self, priority, description):
#         self.priority = priority
#         self.description = description
#         print 'Job:',description
#         return
#     def __cmp__(self, other):
#         return cmp(self.priority, other.priority)

# q = Queue.PriorityQueue()

# q.put(Job(3, 'level 3 job'))
# q.put(Job(10, 'level 10 job'))
# q.put(Job(1, 'level 1 job'))

# def process_job(q):
#     while True:
#         next_job = q.get()
#         print 'for:', next_job.description
#         q.task_done()

# workers = [threading.Thread(target=process_job, args=(q,)),
#         threading.Thread(target=process_job, args=(q,))
#         ]

# for w in workers:
#     w.setDaemon(True)
#     w.start()

# q.join()
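
The commented-out PriorityQueue example relies on __cmp__, which Python 3 no longer calls; there the usual workaround is to queue (priority, item) tuples, or to define __lt__ on the item class. A minimal Python 3 sketch:

import queue

q = queue.PriorityQueue()
q.put((3, 'level 3 job'))
q.put((10, 'level 10 job'))
q.put((1, 'level 1 job'))

# The lowest priority value is retrieved first: 1, 3, 10.
while not q.empty():
    priority, description = q.get()
    print('for:', description)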

 
