TypeError: can't pickle module objects

I run pytorch tutorial ‘s Training a Classifier demo, but got this error:

TypeError Traceback (most recent call last)
—> 14 dataiter = iter(trainloader)
15 images, labels = dataiter.next()

D:\DL_software\envs\pytorch\lib\site-packages\torch\utils\data\dataloader.py in iter(self)
818 def iter(self):
–> 819 return _DataLoaderIter(self)
821 def len(self):

D:\DL_software\envs\pytorch\lib\site-packages\torch\utils\data\dataloader.py in init(self, loader)
558 # before it starts, and del tries to join but will get:
559 # AssertionError: can only join a started process.
–> 560 w.start()
561 self.index_queues.append(index_queue)
562 self.workers.append(w)

D:\DL_software\envs\pytorch\lib\multiprocessing\process.py in start(self)
103 ‘daemonic processes are not allowed to have children’
104 _cleanup()
–> 105 self._popen = self._Popen(self)
106 self._sentinel = self._popen.sentinel
107 # Avoid a refcycle if the target function holds an indirect

D:\DL_software\envs\pytorch\lib\multiprocessing\context.py in _Popen(process_obj)
221 @staticmethod
222 def _Popen(process_obj):
–> 223 return _default_context.get_context().Process._Popen(process_obj)
225 class DefaultContext(BaseContext):

D:\DL_software\envs\pytorch\lib\multiprocessing\context.py in _Popen(process_obj)
320 def _Popen(process_obj):
321 from .popen_spawn_win32 import Popen
–> 322 return Popen(process_obj)
324 class SpawnContext(BaseContext):

D:\DL_software\envs\pytorch\lib\multiprocessing\popen_spawn_win32.py in init(self, process_obj)
63 try:
64 reduction.dump(prep_data, to_child)
—> 65 reduction.dump(process_obj, to_child)
66 finally:
67 set_spawning_popen(None)

D:\DL_software\envs\pytorch\lib\multiprocessing\reduction.py in dump(obj, file, protocol)
58 def dump(obj, file, protocol=None):
59 ‘’‘Replacement for pickle.dump() using ForkingPickler.’’’
—> 60 ForkingPickler(file, protocol).dump(obj)
62 #

TypeError: can’t pickle module objects

Having the same problem. Any replies?

I have the same problem when I try to introduce another module into my Dataset object. When I delete self.ds, it runs. But I have no idea how to keep self.ds and correct the code.

class Dataset(torch.utils.data.Dataset):
    """Keypoint dataset wrapper configured from a nested config dict.

    NOTE(review): in the original report, ``ds`` was a module object, which
    makes instances of this class unpicklable and breaks DataLoader worker
    processes under the spawn start method (e.g. on Windows).
    """

    def __init__(self, config, ds, index):
        # Resolution settings come from the training section of the config.
        train_cfg = config['train']
        self.input_res = train_cfg['input_res']
        self.output_res = train_cfg['output_res']
        # Heatmap generator is sized by output resolution and part count.
        num_parts = config['inference']['num_parts']
        self.generateHeatmap = GenerateHeatmap(self.output_res, num_parts)
        self.ds = ds    # the error source: if this is a module, pickling fails
        self.index = index

Maybe some parameters/variables in your code are modules; you can rewrite them as a class.

hmmm I can’t seem to find where the error is for me, but in case this is useful for others:

def print_dataloader_attributes(dataloader):
    """Print every attribute of *dataloader* and flag module-typed ones.

    Module-valued attributes cannot be pickled, so they break DataLoader
    worker processes started with the spawn method (e.g. on Windows) and
    cause "TypeError: can't pickle module objects".

    :param dataloader: any object whose attributes should be inspected.
    :return: None; output goes to stdout.
    """
    from types import ModuleType

    for attribute in dir(dataloader):
        attribute_value = getattr(dataloader, attribute)
        print(f'{attribute=}, {type(attribute_value)=}\n')
        # isinstance(x, ModuleType) is equivalent to inspect.ismodule(x)
        # and to type(x) is type(inspect) — one check suffices.
        if isinstance(attribute_value, ModuleType):
            print(f'WARNING: attribute {attribute!r} is a module object '
                  f'and cannot be pickled')

For me, this error was fixed when I restarted my Jupyter Notebook and re-ran the code.