I'm trying to work with a Python wrapper using ctypes. Adapting the manufacturer's example, I send frames as a ctypes.Structure object, and that works fine. But the object can't be pickled, so I can't set up a queue to a separate process. I tried pathos, which is based on dill:
import ctypes
from pathos.helpers import mp
import time

# Define the point structure
class HeliosPoint(ctypes.Structure):
    # _pack_ = 1
    # projector maximum seems to be uint12
    _fields_ = [('x', ctypes.c_uint16),
                ('y', ctypes.c_uint16),
                ('r', ctypes.c_uint8),
                ('g', ctypes.c_uint8),
                ('b', ctypes.c_uint8),
                ('i', ctypes.c_uint8)]

frameType = HeliosPoint * 1000

def fill_queue_with_frames(Qf):
    # Producer: keep putting fresh 1000-point frames on the queue
    while True:
        frame = frameType()
        Qf.put(frame)

frame_q = mp.Queue(1000)
ProcessB = mp.Process(target=fill_queue_with_frames, args=(frame_q,))
ProcessB.start()
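For context, the consuming side (not shown above) is meant to read frames back off the same queue, roughly like the sketch below; consume_frames is a hypothetical name for illustration, not part of the manufacturer's example:

def consume_frames(Qf):
    # Intended consumer: pull one 1000-point frame at a time off the queue
    while True:
        frame = Qf.get()
        # ... hand the frame to the projector wrapper here ...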
When I use pathos, I get the following error:
Traceback (most recent call last):
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/site-packages/multiprocess/queues.py", line 237, in _feed
obj = _ForkingPickler.dumps(obj)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/site-packages/multiprocess/reduction.py", line 54, in dumps
cls(buf, protocol).dump(obj)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 409, in dump
self.save(obj)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 521, in save
self.save_reduce(obj=obj, *rv)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 610, in save_reduce
save(args)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 736, in save_tuple
save(element)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 476, in save
f(self, obj) # Call unbound method with explicit self
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/site-packages/dill/_dill.py", line 1293, in save_type
StockPickler.save_global(pickler, obj)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 922, in save_global
(obj, module_name, name))
_pickle.PicklingError: Can't pickle <class '__main__.HeliosPoint_Array_1000'>: it's not found as __main__.HeliosPoint_Array_1000
Traceback (most recent call last):
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 269, in _getattribute
obj = getattr(obj, subpath)
AttributeError: module '__main__' has no attribute 'HeliosPoint_Array_1000'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 918, in save_global
obj2, parent = _getattribute(module, name)
File "/home/smaug/anaconda3/envs/Laser_p3.6/lib/python3.6/pickle.py", line 272, in _getattribute
.format(name, obj))
AttributeError: Can't get attribute 'HeliosPoint_Array_1000' on <module '__main__' from 'test_queue.py'>
Can this be solved? Is there another way to set up two parallel processes without serialization?
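For reference, a minimal sketch of two possible workarounds, neither verified against the projector wrapper. The first is an assumption: binding the dynamically created array class under its auto-generated name HeliosPoint_Array_1000 at module level may let dill resolve it. The second bypasses pickling of the ctypes object entirely by sending raw bytes; fill_queue_with_frame_bytes and consume_frame_bytes are made-up names for illustration:

import ctypes
from pathos.helpers import mp

class HeliosPoint(ctypes.Structure):
    _fields_ = [('x', ctypes.c_uint16), ('y', ctypes.c_uint16),
                ('r', ctypes.c_uint8), ('g', ctypes.c_uint8),
                ('b', ctypes.c_uint8), ('i', ctypes.c_uint8)]

# Workaround 1 (assumption): give the array class a module-level name that
# matches its auto-generated __name__, so the pickler can look it up as
# __main__.HeliosPoint_Array_1000.
HeliosPoint_Array_1000 = HeliosPoint * 1000
frameType = HeliosPoint_Array_1000

# Workaround 2: put the raw bytes of the frame on the queue instead of the
# ctypes object itself.
def fill_queue_with_frame_bytes(Qf):
    while True:
        frame = frameType()
        Qf.put(bytes(frame))                   # plain bytes pickle without issue

def consume_frame_bytes(Qf):
    raw = Qf.get()
    return frameType.from_buffer_copy(raw)    # rebuild the ctypes array

If serialization is to be avoided altogether, multiprocessing.sharedctypes.RawArray(HeliosPoint, 1000) (or the multiprocess equivalent) allocates the frame in shared memory that both processes can access directly, at the cost of handling synchronization yourself.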