Python:队列和线程的阻塞问题
Python: Blocking issues with a queue and threads
我遇到了 Python 队列和线程的奇怪问题。
我有一个 web.py 应用程序来安排作业,因此有一个 global incoming_queue = Queue(maxsize=10)
.
我有一个 url 和一个添加到队列的 GET 处理程序(我也添加到一个列表,因为我需要知道队列的内容):
class ProcessRequest:
def GET(self):
global incoming_queue, incoming_jobs
if incoming_queue.full():
print "Queue is full"
return web.InternalError("Queue is full, please try submitting later.")
else:
job_id = getNextInt()
req_folder = "req" + str(job_id)
incoming_queue.put(job_id)
incoming_jobs.append(job_id)
print "Received request, assigning Drop Folder {0}".format(req_folder)
web.header('Drop-Folder', req_folder)
return req_folder
我还运行了一个线程来处理作业:
# Background worker: pull job ids off the shared queue and process them.
def processJobs():
global incoming_queue, incoming_jobs, current_job, finished_jobs
while True:
print "Job processor thread active"
# Blocks here until a job id is available on the queue.
current_job = incoming_queue.get(block=True)
incoming_jobs.remove(current_job)
print "Processing job {0}".format(current_job)
# Do stuff here
print "Job processor thread ready for next job"
# Unreachable while the loop above never breaks.
print "Job processor thread finished"
我按如下方式启动并运行程序:
if __name__ == '__main__':
    # Start the background job processor before the web app takes over
    # the main thread.
    job_processor_thread = threading.Thread(target=processJobs)
    # FIX: daemonize so the endless worker loop cannot keep the process
    # alive after the web app shuts down.
    job_processor_thread.daemon = True
    job_processor_thread.start()
    app.run()
然后我调用添加到队列的 URL。使用另一个 url,我能够检查该项目是否确实已添加到列表中,并将以下代码添加到原始 url 处理程序 (print incoming_queue.get()
) 的 GET 方法中,我验证该项目确实被添加到队列中。
作业处理线程刚刚阻塞在 current_job = incoming_queue.get(block=True)
。这是有意的。但是,它永远不会解除阻塞,即使将项目添加到队列中也是如此。它只是永远被阻止。
这是为什么?它几乎就像它有一个单独的队列对象副本。
编辑: 根据 Martin 的建议,我决定尝试查看 GET 方法和 processJobs 方法中引用的对象。
processJobs(): <Queue.Queue instance at 0x7f32b6958a70>
GET(): <Queue.Queue instance at 0x7f32b5ec5368>
是的,它们是不同的,但为什么呢?
编辑 #2:这是供参考的整个脚本:
'''
Created on Apr 20, 2015
@author: chris
'''
import web
import time
import threading
import json
from Queue import Queue, Empty
import os
# web.py routing table: URL pattern followed by handler class name.
urls = (
'/request', 'ProcessRequest',
'/status', 'CheckStatus',
)
# Placeholder Thread object; never started here (presumably leftover -- TODO confirm).
current_job_thread = threading.Thread()
app = web.application(urls, globals())
# Shared job-tracking state, intended to be visible to the worker thread.
incoming_jobs = []
incoming_queue = Queue(maxsize=10)
current_job = None
finished_jobs = []
# Counter backing getNextInt().
next_int = 0
def getNextInt():
    """Return the next job id and advance the module-level counter.

    NOTE(review): not synchronized -- concurrent requests could be handed
    duplicate ids; confirm whether requests are served serially.
    """
    # FIX: 'incoming_queue' was also declared global here but never used; dropped.
    global next_int
    the_int = next_int
    next_int += 1
    return the_int
class ProcessRequest:
def GET(self):
global incoming_queue, incoming_jobs
if incoming_queue.full():
print "Queue is full"
return web.InternalError("Queue is full, please try submitting later.")
else:
job_id = getNextInt()
req_folder = "req" + str(job_id)
print incoming_queue
incoming_queue.put(job_id)
incoming_jobs.append(job_id)
print "Received request, assigning Drop Folder {0}".format(req_folder)
web.header('Drop-Folder', req_folder)
return req_folder
class CheckStatus:
def GET(self):
global incoming_queue, incoming_jobs, current_job, finished_jobs
if str(web.input().jobid) == 'all':
# Construct JSON to return
web.header('Content-Type', 'application/json')
return {'In Queue': incoming_jobs,
'Currently Processing': current_job,
'Finished': finished_jobs
}
try:
jobid = int(web.input().jobid)
except ValueError:
jobid = -1
print jobid
if jobid in finished_jobs:
file_string = "results{0}.json".format(jobid)
try:
json_file = open(file_string)
finished_jobs.remove(jobid)
os.remove(file_string)
web.header('Process-Status', 'Complete')
web.header('Content-Type', 'application/json')
return json.load(json_file)
except IOError:
web.header('Process-Status', 'Complete, but failed to retrieve file, saving')
return ""
elif jobid is current_job:
web.header('Process-Status', 'Processing')
elif jobid in incoming_jobs:
web.header('Process-Status', 'In Queue')
else:
web.header('Process-Status', 'Unknown')
return ""
# Background worker: endlessly pull job ids off the shared queue, simulate
# processing, and record completion in the shared lists.
def processJobs():
global incoming_queue, incoming_jobs, current_job, finished_jobs
while True:
# Debug aid: show which Queue object this thread actually sees.
print incoming_queue
print "Job processor thread active"
# Blocks here until a job id is available.
current_job = incoming_queue.get(block=True)
incoming_jobs.remove(current_job)
print "Processing job {0}".format(current_job)
# Do magical Spark stuff here
time.sleep(10) # Simulate a Spark Job
finished_jobs.append(current_job)
current_job = None
print "Job processor thread ready for next job"
# Unreachable while the loop above never breaks.
print "Job processor thread finished"
if __name__ == '__main__':
    # Start the background job processor before the web app takes over
    # the main thread.
    job_processor_thread = threading.Thread(target=processJobs)
    # FIX: daemonize so the endless worker loop cannot keep the process
    # alive after the web app shuts down.
    job_processor_thread.daemon = True
    job_processor_thread.start()
    app.run()
您可以简单地通过打印对象来测试您认为它们是不同队列的假设:
# Diagnostic fragment: print the queue object seen by the worker thread.
def processJobs():
global incoming_queue, incoming_jobs, current_job, finished_jobs
print incoming_queue # print something like <__main__.Queue instance at 0x7f556d93f830>
# Diagnostic fragment: print the queue object seen by the request handler.
class ProcessRequest:
def GET(self):
global incoming_queue, incoming_jobs
print incoming_queue # print something like <__main__.Queue instance at 0x7f556d93f830>
确保内存地址 (0x7f556d93f830) 匹配。
您从未提及您是否正在使用框架来处理 Web 请求,因此框架可能正在执行一些分叉,这导致您的队列成为单独的实例。
附带说明一下,您可能希望将 Redis 或 beanstalk 视为队列 - 它们使用起来非常简单,即使您重新启动应用程序,您的队列也会持续存在。
在 Martin 的指导下,我能够使用这里的想法解决问题:https://groups.google.com/forum/#!topic/webpy/u-cfL7jLywo。
基本上,web.py在发出请求时重新创建全局变量,所以如果我们想在框架和其他线程之间共享数据,我们不能使用全局变量。解决方案是创建另一个模块,在该模块中创建一个 class,然后将变量定义添加到那里。这是我最终得到的结果:
jobqueue.py:
'''
Created on Apr 23, 2015
@author: chris
'''
import Queue
# Shared job state kept as class attributes on an importable class, so every
# module (web.py handlers and the worker thread) sees the same objects.
class JobManagement:
# Pending job ids, bounded at 10 entries.
incoming_queue = Queue.Queue(maxsize=10)
# Mirror of the queue contents, used for status reporting.
incoming_jobs = []
# Id of the job currently being processed (None when idle).
current_job = None
# Ids of jobs whose result files are ready.
finished_jobs = []
main.py:
'''
Created on Apr 20, 2015
@author: chris
'''
import web
import time
import threading
import json
from Queue import Queue, Empty
import os
from jobqueue import JobManagement
# web.py routing table: URL pattern followed by handler class name.
urls = (
'/request', 'ProcessRequest',
'/status', 'CheckStatus',
)
app = web.application(urls, globals())
# Counter backing getNextInt().
next_int = 0
def getNextInt():
    """Return the next job id and advance the module-level counter."""
    global next_int
    value = next_int
    next_int = value + 1
    return value
class ProcessRequest:
def GET(self):
if JobManagement.incoming_queue.full():
print "Queue is full"
return web.InternalError("Queue is full, please try submitting later.")
else:
job_id = getNextInt()
req_folder = "req" + str(job_id)
print JobManagement.incoming_queue
JobManagement.incoming_queue.put(job_id)
JobManagement.incoming_jobs.append(job_id)
print "Received request, assigning Drop Folder {0}".format(req_folder)
web.header('Drop-Folder', req_folder)
return req_folder
class CheckStatus:
def GET(self):
if str(web.input().jobid) == 'all':
# Construct JSON to return
web.header('Content-Type', 'application/json')
return {'In Queue': JobManagement.incoming_jobs,
'Currently Processing': JobManagement.current_job,
'Finished': JobManagement.finished_jobs
}
try:
jobid = int(web.input().jobid)
except ValueError:
jobid = -1
print jobid
if jobid in JobManagement.finished_jobs:
file_string = "results{0}.json".format(jobid)
try:
json_file = open(file_string)
JobManagement.finished_jobs.remove(jobid)
os.remove(file_string)
web.header('Process-Status', 'Complete')
web.header('Content-Type', 'application/json')
return json.load(json_file)
except IOError:
web.header('Process-Status', 'Complete, but failed to retrieve file, saving')
return ""
elif jobid is JobManagement.current_job:
web.header('Process-Status', 'Processing')
elif jobid in JobManagement.incoming_jobs:
web.header('Process-Status', 'In Queue')
else:
web.header('Process-Status', 'Unknown')
return ""
# Background worker: endlessly pull job ids off the shared queue, simulate
# processing, and record completion in JobManagement.
def processJobs():
while True:
# Debug aid: show which Queue object this thread actually sees.
print JobManagement.incoming_queue
print "Job processor thread active"
# Blocks here until a job id is available.
JobManagement.current_job = JobManagement.incoming_queue.get(block=True)
JobManagement.incoming_jobs.remove(JobManagement.current_job)
print "Processing job {0}".format(JobManagement.current_job)
# Do magical Spark stuff here
time.sleep(10) # Simulate a Spark Job
JobManagement.finished_jobs.append(JobManagement.current_job)
JobManagement.current_job = None
print "Job processor thread ready for next job"
# Unreachable while the loop above never breaks.
print "Job processor thread finished"
if __name__ == '__main__':
print JobManagement.incoming_queue
job_processor_thread = threading.Thread(target=processJobs)
job_processor_thread.start()
app.run()
我遇到了 Python 队列和线程的奇怪问题。
我有一个 web.py 应用程序来安排作业,因此有一个 global incoming_queue = Queue(maxsize=10)
.
我有一个 url 和一个添加到队列的 GET 处理程序(我也添加到一个列表,因为我需要知道队列的内容):
class ProcessRequest:
def GET(self):
global incoming_queue, incoming_jobs
if incoming_queue.full():
print "Queue is full"
return web.InternalError("Queue is full, please try submitting later.")
else:
job_id = getNextInt()
req_folder = "req" + str(job_id)
incoming_queue.put(job_id)
incoming_jobs.append(job_id)
print "Received request, assigning Drop Folder {0}".format(req_folder)
web.header('Drop-Folder', req_folder)
return req_folder
我还运行了一个线程来处理作业:
# Background worker: pull job ids off the shared queue and process them.
def processJobs():
global incoming_queue, incoming_jobs, current_job, finished_jobs
while True:
print "Job processor thread active"
# Blocks here until a job id is available on the queue.
current_job = incoming_queue.get(block=True)
incoming_jobs.remove(current_job)
print "Processing job {0}".format(current_job)
# Do stuff here
print "Job processor thread ready for next job"
# Unreachable while the loop above never breaks.
print "Job processor thread finished"
我按如下方式启动并运行程序:
if __name__ == '__main__':
    # Start the background job processor before the web app takes over
    # the main thread.
    job_processor_thread = threading.Thread(target=processJobs)
    # FIX: daemonize so the endless worker loop cannot keep the process
    # alive after the web app shuts down.
    job_processor_thread.daemon = True
    job_processor_thread.start()
    app.run()
然后我调用添加到队列的 URL。使用另一个 url,我能够检查该项目是否确实已添加到列表中,并将以下代码添加到原始 url 处理程序 (print incoming_queue.get()
) 的 GET 方法中,我验证该项目确实被添加到队列中。
作业处理线程刚刚阻塞在 current_job = incoming_queue.get(block=True)
。这是有意的。但是,它永远不会解除阻塞,即使将项目添加到队列中也是如此。它只是永远被阻止。
这是为什么?它几乎就像它有一个单独的队列对象副本。
编辑: 根据 Martin 的建议,我决定尝试查看 GET 方法和 processJobs 方法中引用的对象。
processJobs(): <Queue.Queue instance at 0x7f32b6958a70>
GET(): <Queue.Queue instance at 0x7f32b5ec5368>
是的,它们是不同的,但为什么呢?
编辑 #2:这是供参考的整个脚本:
'''
Created on Apr 20, 2015
@author: chris
'''
import web
import time
import threading
import json
from Queue import Queue, Empty
import os
# web.py routing table: URL pattern followed by handler class name.
urls = (
'/request', 'ProcessRequest',
'/status', 'CheckStatus',
)
# Placeholder Thread object; never started here (presumably leftover -- TODO confirm).
current_job_thread = threading.Thread()
app = web.application(urls, globals())
# Shared job-tracking state, intended to be visible to the worker thread.
incoming_jobs = []
incoming_queue = Queue(maxsize=10)
current_job = None
finished_jobs = []
# Counter backing getNextInt().
next_int = 0
def getNextInt():
    """Return the next job id and advance the module-level counter.

    NOTE(review): not synchronized -- concurrent requests could be handed
    duplicate ids; confirm whether requests are served serially.
    """
    # FIX: 'incoming_queue' was also declared global here but never used; dropped.
    global next_int
    the_int = next_int
    next_int += 1
    return the_int
class ProcessRequest:
def GET(self):
global incoming_queue, incoming_jobs
if incoming_queue.full():
print "Queue is full"
return web.InternalError("Queue is full, please try submitting later.")
else:
job_id = getNextInt()
req_folder = "req" + str(job_id)
print incoming_queue
incoming_queue.put(job_id)
incoming_jobs.append(job_id)
print "Received request, assigning Drop Folder {0}".format(req_folder)
web.header('Drop-Folder', req_folder)
return req_folder
class CheckStatus:
def GET(self):
global incoming_queue, incoming_jobs, current_job, finished_jobs
if str(web.input().jobid) == 'all':
# Construct JSON to return
web.header('Content-Type', 'application/json')
return {'In Queue': incoming_jobs,
'Currently Processing': current_job,
'Finished': finished_jobs
}
try:
jobid = int(web.input().jobid)
except ValueError:
jobid = -1
print jobid
if jobid in finished_jobs:
file_string = "results{0}.json".format(jobid)
try:
json_file = open(file_string)
finished_jobs.remove(jobid)
os.remove(file_string)
web.header('Process-Status', 'Complete')
web.header('Content-Type', 'application/json')
return json.load(json_file)
except IOError:
web.header('Process-Status', 'Complete, but failed to retrieve file, saving')
return ""
elif jobid is current_job:
web.header('Process-Status', 'Processing')
elif jobid in incoming_jobs:
web.header('Process-Status', 'In Queue')
else:
web.header('Process-Status', 'Unknown')
return ""
# Background worker: endlessly pull job ids off the shared queue, simulate
# processing, and record completion in the shared lists.
def processJobs():
global incoming_queue, incoming_jobs, current_job, finished_jobs
while True:
# Debug aid: show which Queue object this thread actually sees.
print incoming_queue
print "Job processor thread active"
# Blocks here until a job id is available.
current_job = incoming_queue.get(block=True)
incoming_jobs.remove(current_job)
print "Processing job {0}".format(current_job)
# Do magical Spark stuff here
time.sleep(10) # Simulate a Spark Job
finished_jobs.append(current_job)
current_job = None
print "Job processor thread ready for next job"
# Unreachable while the loop above never breaks.
print "Job processor thread finished"
if __name__ == '__main__':
    # Start the background job processor before the web app takes over
    # the main thread.
    job_processor_thread = threading.Thread(target=processJobs)
    # FIX: daemonize so the endless worker loop cannot keep the process
    # alive after the web app shuts down.
    job_processor_thread.daemon = True
    job_processor_thread.start()
    app.run()
您可以简单地通过打印对象来测试您认为它们是不同队列的假设:
# Diagnostic fragment: print the queue object seen by the worker thread.
def processJobs():
global incoming_queue, incoming_jobs, current_job, finished_jobs
print incoming_queue # print something like <__main__.Queue instance at 0x7f556d93f830>
# Diagnostic fragment: print the queue object seen by the request handler.
class ProcessRequest:
def GET(self):
global incoming_queue, incoming_jobs
print incoming_queue # print something like <__main__.Queue instance at 0x7f556d93f830>
确保内存地址 (0x7f556d93f830) 匹配。
您从未提及您是否正在使用框架来处理 Web 请求,因此框架可能正在执行一些分叉,这导致您的队列成为单独的实例。
附带说明一下,您可能希望将 Redis 或 beanstalk 视为队列 - 它们使用起来非常简单,即使您重新启动应用程序,您的队列也会持续存在。
在 Martin 的指导下,我能够使用这里的想法解决问题:https://groups.google.com/forum/#!topic/webpy/u-cfL7jLywo。
基本上,web.py在发出请求时重新创建全局变量,所以如果我们想在框架和其他线程之间共享数据,我们不能使用全局变量。解决方案是创建另一个模块,在该模块中创建一个 class,然后将变量定义添加到那里。这是我最终得到的结果:
jobqueue.py:
'''
Created on Apr 23, 2015
@author: chris
'''
import Queue
# Shared job state kept as class attributes on an importable class, so every
# module (web.py handlers and the worker thread) sees the same objects.
class JobManagement:
# Pending job ids, bounded at 10 entries.
incoming_queue = Queue.Queue(maxsize=10)
# Mirror of the queue contents, used for status reporting.
incoming_jobs = []
# Id of the job currently being processed (None when idle).
current_job = None
# Ids of jobs whose result files are ready.
finished_jobs = []
main.py:
'''
Created on Apr 20, 2015
@author: chris
'''
import web
import time
import threading
import json
from Queue import Queue, Empty
import os
from jobqueue import JobManagement
# web.py routing table: URL pattern followed by handler class name.
urls = (
'/request', 'ProcessRequest',
'/status', 'CheckStatus',
)
app = web.application(urls, globals())
# Counter backing getNextInt().
next_int = 0
def getNextInt():
    """Return the next job id and advance the module-level counter."""
    global next_int
    value = next_int
    next_int = value + 1
    return value
class ProcessRequest:
def GET(self):
if JobManagement.incoming_queue.full():
print "Queue is full"
return web.InternalError("Queue is full, please try submitting later.")
else:
job_id = getNextInt()
req_folder = "req" + str(job_id)
print JobManagement.incoming_queue
JobManagement.incoming_queue.put(job_id)
JobManagement.incoming_jobs.append(job_id)
print "Received request, assigning Drop Folder {0}".format(req_folder)
web.header('Drop-Folder', req_folder)
return req_folder
class CheckStatus:
def GET(self):
if str(web.input().jobid) == 'all':
# Construct JSON to return
web.header('Content-Type', 'application/json')
return {'In Queue': JobManagement.incoming_jobs,
'Currently Processing': JobManagement.current_job,
'Finished': JobManagement.finished_jobs
}
try:
jobid = int(web.input().jobid)
except ValueError:
jobid = -1
print jobid
if jobid in JobManagement.finished_jobs:
file_string = "results{0}.json".format(jobid)
try:
json_file = open(file_string)
JobManagement.finished_jobs.remove(jobid)
os.remove(file_string)
web.header('Process-Status', 'Complete')
web.header('Content-Type', 'application/json')
return json.load(json_file)
except IOError:
web.header('Process-Status', 'Complete, but failed to retrieve file, saving')
return ""
elif jobid is JobManagement.current_job:
web.header('Process-Status', 'Processing')
elif jobid in JobManagement.incoming_jobs:
web.header('Process-Status', 'In Queue')
else:
web.header('Process-Status', 'Unknown')
return ""
# Background worker: endlessly pull job ids off the shared queue, simulate
# processing, and record completion in JobManagement.
def processJobs():
while True:
# Debug aid: show which Queue object this thread actually sees.
print JobManagement.incoming_queue
print "Job processor thread active"
# Blocks here until a job id is available.
JobManagement.current_job = JobManagement.incoming_queue.get(block=True)
JobManagement.incoming_jobs.remove(JobManagement.current_job)
print "Processing job {0}".format(JobManagement.current_job)
# Do magical Spark stuff here
time.sleep(10) # Simulate a Spark Job
JobManagement.finished_jobs.append(JobManagement.current_job)
JobManagement.current_job = None
print "Job processor thread ready for next job"
# Unreachable while the loop above never breaks.
print "Job processor thread finished"
if __name__ == '__main__':
print JobManagement.incoming_queue
job_processor_thread = threading.Thread(target=processJobs)
job_processor_thread.start()
app.run()