How to access the result of a function called in a multiprocessing process?
I am running this code:
import random
import multiprocessing
import time

def list_append(count, id):
    out_list = []
    for i in range(count):
        out_list.append(random.random())
    return out_list

if __name__ == "__main__":
    t0 = time.clock()
    size = 10000000   # Number of random numbers to add
    procs = 2         # Number of processes to create

    jobs = []
    for i in range(0, procs):
        process = multiprocessing.Process(target=list_append, args=(size, i))
        jobs.append(process)

    # Start the processes (i.e. calculate the random number lists)
    res = []
    for j in jobs:
        r = j.start()
        res.append(r)

    # Ensure all of the processes have finished
    for j in jobs:
        j.join()

    print "List processing complete."
    print time.clock() - t0, "seconds"
Unfortunately, at the end of it, res = [None, None], whereas I expected it to be populated with the lists that I build inside the list_append function.
You need to use data structures that can be shared between processes:
import random
import multiprocessing

def list_append(count, id, res):        # extra `res` argument
    out_list = []
    for i in range(count):
        out_list.append(random.random())
    res[id] = out_list                  # store the result instead of returning it

if __name__ == "__main__":
    size = 10000000
    procs = 2

    manager = multiprocessing.Manager()
    res = manager.dict()                # dict that can be shared between processes

    jobs = []
    for i in range(0, procs):
        # pass the shared dict to the worker
        process = multiprocessing.Process(target=list_append, args=(size, i, res))
        jobs.append(process)

    for j in jobs:
        j.start()
    for j in jobs:
        j.join()

    print "List processing complete."
    # now `res` will contain the results
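If you only need to hand each worker's result back to the parent rather than share a mutable container, a multiprocessing.Queue is another option. The following is a hypothetical sketch, not part of the original answer; the extra queue parameter and the smaller size are my own additions for illustration:

import random
import multiprocessing

def list_append(count, id, queue):
    out_list = [random.random() for _ in range(count)]
    # send an (id, result) pair back to the parent instead of returning it
    queue.put((id, out_list))

if __name__ == "__main__":
    size = 1000000
    procs = 2

    queue = multiprocessing.Queue()
    jobs = [multiprocessing.Process(target=list_append, args=(size, i, queue))
            for i in range(procs)]
    for j in jobs:
        j.start()

    # drain the queue before joining so a full pipe cannot block the workers
    res = dict(queue.get() for _ in range(procs))

    for j in jobs:
        j.join()
    # res[0] and res[1] now hold the two generated lists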
As avenet commented, using multiprocessing.Pool would be simpler:
def list_append(args):
count, id = args
out_list = []
for i in range(count):
out_list.append(random.random())
return out_list
if __name__ == "__main__":
size = 10000000
procs = 2
pool = multiprocessing.Pool(procs)
res = pool.map(list_append, [(size, i) for i in range(procs)])
pool.close()
pool.join()
print "List processing complete."
# print res
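If you would rather keep the original two-argument list_append(count, id) signature instead of packing the arguments into a tuple, pool.apply_async is a close alternative: it returns an AsyncResult whose get() yields the function's return value. A minimal sketch (the smaller size is only for illustration):

import random
import multiprocessing

def list_append(count, id):
    return [random.random() for _ in range(count)]

if __name__ == "__main__":
    size = 1000000
    procs = 2

    pool = multiprocessing.Pool(procs)
    # each apply_async call schedules one task and returns an AsyncResult
    async_results = [pool.apply_async(list_append, (size, i)) for i in range(procs)]
    pool.close()
    pool.join()

    # get() returns what list_append returned in the worker process
    res = [r.get() for r in async_results]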