I'm running into a problem where worker processes won't exit when I use Python's multiprocessing. The code is below; the environment is Python 2.7 on Debian.
# -*-coding:UTF-8 -*-
import multiprocessing as mp
from multiprocessing import Queue
import BeautifulSoup as bs4
from Queue import Empty
import urllib
import json
'''
getPageData acts as the producer: it fetches the job listing pages.
getDetail acts as the consumer: it fetches the details of each job.
queue holds the IDs of the job detail pages.
'''
interface = 'http://www.lagou.com/jobs/positionAjax.json?px=default&yx=10k-15k&needAddtionalResult=false'
detailUrl = 'http://www.lagou.com/jobs/{0}.html'
def getPageData(task, queue, keyword='python'):
    while True:
        try:
            page = task.get(timeout=1)
        except Empty:
            break
        post_data = {'kd': keyword, 'pn': page, 'first': 'false'}
        opener = urllib.urlopen(interface, urllib.urlencode(post_data))
        jsonData = json.loads(opener.read())
        results = jsonData['content']['positionResult']['result']
        for result in results:
            queue.put(result['positionId'])
def getDetail(queue, result):
    while True:
        try:
            positionId = queue.get(timeout=1)
        except Empty:
            print mp.current_process().name + 'exit'
            break
        url = detailUrl.format(positionId)
        print url, mp.current_process().name
        opener = urllib.urlopen(url)
        html = opener.read()
        soup = bs4.BeautifulSoup(html)
        content = soup.findAll(attrs={"class": "job_bt"})[0]
        result.put('{0}\n{1}'.format(detailUrl.format(positionId), content))
def start(keyword='python'):
    task = Queue()
    queue = Queue()
    result = Queue()
    post_data = {'kd': keyword, 'pn': 1, 'first': 'true'}
    opener = urllib.urlopen(interface, urllib.urlencode(post_data))
    jsonData = json.loads(opener.read())
    # number of pages
    totalCount = jsonData['content']['positionResult']['totalCount']
    resultSize = jsonData['content']['positionResult']['resultSize']
    pageNums = totalCount / resultSize
    if totalCount % resultSize:
        pageNums += 1
    results = jsonData['content']['positionResult']['result']
    for r in results:
        queue.put(r['positionId'])
    # only the first three pages while debugging
    pageNums = 3
    for i in range(2, pageNums + 1):
        task.put(i)
    num_consumers = mp.cpu_count()
    processes = [mp.Process(target=getDetail, args=(queue, result))
                 for _ in range(num_consumers)]
    processes.append(mp.Process(target=getPageData, args=(task, queue)))
    for p in processes:
        p.start()
    for p in processes:
        p.join()
    print 'processes over'
    with open('jobs', 'w+') as f:
        while not result.empty():
            a = result.get()
            f.write(a)

if __name__ == '__main__':
    start()
The output looks like this:
# ...many lines omitted...
http://www.lagou.com/jobs/2233028.html Process-3
http://www.lagou.com/jobs/512126.html Process-1
http://www.lagou.com/jobs/2221983.html Process-2
Process-4exit
Process-3exit
Process-1exit
Process-2exit
The script gets stuck at this point and never exits.
The same producer/consumer model runs fine with multithreading and exits normally.
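For reference, the thread-based version I mean has the same structure; here is a stripped-down, self-contained sketch of it (dummy work instead of the HTTP calls), which exits cleanly:

import threading
from Queue import Queue, Empty

def worker(queue, result):
    while True:
        try:
            item = queue.get(timeout=1)
        except Empty:
            print threading.current_thread().name + 'exit'
            break
        result.put('processed {0}\n'.format(item))

def start():
    queue = Queue()
    result = Queue()
    for i in range(100):
        queue.put(i)
    threads = [threading.Thread(target=worker, args=(queue, result))
               for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()  # returns normally once the workers hit the Empty timeout
    print 'threads over'

if __name__ == '__main__':
    start()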
There shouldn't be a deadlock... I don't understand why it can't exit cleanly under multiprocessing.
I tried swapping in a Queue from multiprocessing.Manager instead and, surprisingly, that works:
# replace result = Queue() with:
manager = mp.Manager()
result = manager.Queue()
Now I'm even more confused.
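In case it helps narrow things down, here is a stripped-down, self-contained sketch of the multiprocessing pattern with dummy payloads instead of the HTTP calls; I believe it shows the same behaviour (the workers print 'exit' but join() never returns):

import multiprocessing as mp
from multiprocessing import Queue
from Queue import Empty

def consumer(queue, result):
    while True:
        try:
            item = queue.get(timeout=1)
        except Empty:
            print mp.current_process().name + 'exit'
            break
        # each item produces a fairly large payload, like the job description HTML
        result.put('item {0}: {1}\n'.format(item, 'x' * 10000))

def start():
    queue = Queue()
    result = Queue()
    for i in range(200):
        queue.put(i)
    processes = [mp.Process(target=consumer, args=(queue, result))
                 for _ in range(mp.cpu_count())]
    for p in processes:
        p.start()
    for p in processes:
        p.join()  # this is where my real script gets stuck
    print 'processes over'
    while not result.empty():
        result.get()

if __name__ == '__main__':
    start()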