I want to have 2 independent dataloader running in different subprocess.
def inifite_generator(dataloader, dataloader2, keys = ("image", "label")):
# dataloader2 = copy.deepcopy(dataloader)
dataloader = iter(dataloader)
dataloader2 = iter(dataloader2)
d = next(dataloader) # it works well because it is in the main process
data_queue = []
data_q = Queue(maxsize=10)
data_q2 = Queue(maxsize=10)
def start_dataloader(dl, q):
while True:
try:
data = next(dl)
except StopIteration:
data = next(dl)
x_pps = data[keys[0]]
y_pps = data[keys[1]]
for x, y in zip(x_pps, y_pps):
x = x[None, ...]
y = y[None, ...]
data = (x, y)
if q==1:
data_q.put(data, timeout=100)
else:
data_q2.put(data, timeout=100)
p1 = multiprocessing.Process(target=start_dataloader, args=(dataloader, 1, ))
p2 = multiprocessing.Process(target=start_dataloader, args=(dataloader2, 2, ))
p1.start()
p2.start()
use_q2 = False
while True:
if len(keys)==2:
if data_q.empty() and data_q2.empty():
continue
else:
if data_q.empty() or use_q2:
q = data_q2
use_q2 = True
else:
q = data_q
use_q2 = False
data = q.get(timeout=100)
yield data
Error shows:
Process Process-13:
Traceback (most recent call last):
File "/home/jjia/.conda/envs/py37/lib/python3.7/multiprocessing/process.py", line 297, in _bootstrap
self.run()
File "/home/jjia/.conda/envs/py37/lib/python3.7/multiprocessing/process.py", line 99, in run
self._target(*self._args, **self._kwargs)
File "/home/jjia/data/monai/train_mtnet.py", line 308, in start_dataloader
data = next(dl)
File "/home/jjia/.conda/envs/py37/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 435, in __next__
data = self._next_data()
File "/home/jjia/.conda/envs/py37/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1069, in _next_data
idx, data = self._get_data()
File "/home/jjia/.conda/envs/py37/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1030, in _get_data
raise RuntimeError('Pin memory thread exited unexpectedly')
RuntimeError: Pin memory thread exited unexpectedly