cannot pickle 'weakref' object in python
I'm trying to run the code below. Its real purpose is a more complex program, which I have simplified here to two loops.
I keep getting the following error and can't see what's wrong:
from multiprocessing import Process

def load():
    for i in range(0, 10000):
        print("loadddinngg", i)

def copy(p1):
    # fetch all files
    while p1.is_alive():
        for i in range(0, 100000):
            print("coppppyyy", i)

class multithreading:
    def __init__(self):
        p1 = Process(target=load, args=())
        p2 = Process(target=copy, args=(p1,))
        p1.start()
        p2.start()
        p1.join()
        p2.join()
File "C:\Users\untitled10\toDelete.py", line 19, in __init__
p2.start()
File "C:\Program Files\Python38_64bit\lib\multiprocessing\process.py", line 121, in start
self._popen = self._Popen(self)
File "C:\Program Files\Python38_64bit\lib\multiprocessing\context.py", line 224, in _Popen
return _default_context.get_context().Process._Popen(process_obj)
File "C:\Program Files\Python38_64bit\lib\multiprocessing\context.py", line 327, in _Popen
return Popen(process_obj)
File "C:\Program Files\Python38_64bit\lib\multiprocessing\popen_spawn_win32.py", line 93, in __init__
reduction.dump(process_obj, to_child)
File "C:\Program Files\Python38_64bit\lib\multiprocessing\reduction.py", line 60, in dump
ForkingPickler(file, protocol).dump(obj)
TypeError: cannot pickle 'weakref' object
The args of a Process get pickled so that the information can be sent to the new process (so that it can unpickle them in its own address space and recreate the same Python objects). A Process object itself cannot be pickled, and here p1 is part of p2's args.
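As an illustration (not from the original answer): everything passed in args has to survive a pickle round trip, and weak references are among the objects that cannot be pickled, which is apparently what p1 carries internally once it has been started. A minimal, self-contained sketch:

import pickle
import weakref

class Data:
    pass

obj = Data()
pickle.dumps(obj)                # ordinary objects pickle fine
pickle.dumps([1, "x", {2: 3}])   # so do built-in containers

try:
    pickle.dumps(weakref.ref(obj))   # weak references do not
except TypeError as exc:
    print(exc)   # on Python 3.8: "cannot pickle 'weakref' object"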
Also, as stated in the documentation:
Note that the start(), join(), is_alive(), terminate() and exitcode methods should only be called by the process that created the process object.
So we cannot rely on calling is_alive from within the spawned process anyway.
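(An aside that is not part of the original answer: the question's class is called multithreading, and with threads the original pattern would in fact work, because threads share the parent's memory, nothing is pickled, and is_alive may be called from any thread. A sketch under that assumption:)

from threading import Thread

def load():
    for i in range(0, 10000):
        print("loadddinngg", i)

def copy(t1):
    # the Thread object is shared directly; no pickling involved
    while t1.is_alive():
        print("coppppyyy")

t1 = Thread(target=load)
t2 = Thread(target=copy, args=(t1,))
t1.start()
t2.start()
t1.join()
t2.join()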
To communicate between processes, use a multiprocessing.Queue. For this particular case we want p2 to stop once p1 has finished. That means p1 has to tell p2 that it is about to finish, and p2 has to receive that message and react by stopping.
A simple example could look like this:
from multiprocessing import Process, Queue
from queue import Empty  # exception raised when the queue is empty

def do_some_work():
    pass  # placeholder for the real loading work

def do_some_other_work():
    pass  # placeholder for the real copying work

def load(q):
    do_some_work()
    # the actual object that we `put` in the Queue
    # doesn't matter for this trivial example
    q.put(None)

def copy(q):
    while True:
        do_some_other_work()
        try:
            q.get_nowait()  # non-blocking check; a plain get() would block and never raise Empty
        except Empty:
            pass  # the message wasn't sent yet
        else:
            return  # it was sent, so stop this process too

if __name__ == '__main__':
    q = Queue()
    p1 = Process(target=load, args=(q,))
    p2 = Process(target=copy, args=(q,))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
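A variation that is not in the original answer: since p2 only needs a boolean "p1 is done" signal rather than actual data, a multiprocessing.Event expresses the same idea a bit more directly. A sketch along those lines:

from multiprocessing import Process, Event

def load(done):
    for i in range(0, 10000):
        print("loadddinngg", i)
    done.set()  # signal that loading has finished

def copy(done):
    while not done.is_set():
        print("coppppyyy")

if __name__ == '__main__':
    done = Event()
    p1 = Process(target=load, args=(done,))
    p2 = Process(target=copy, args=(done,))
    p1.start()
    p2.start()
    p1.join()
    p2.join()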