How can I make my modules compatible with torch.save,
so that I can avoid the following error when I try to save my model after training?
Traceback (most recent call last):
File "main.py", line 351, in <module>
main()
File "main.py", line 225, in main
synth_data, log_model_iw = model_map[args.model_class](**params)
File "/Users/harrisonwilde/Library/Mobile Documents/com~apple~CloudDocs/PhD/Holmes/WeightedDP/models/CUSTOM/dpgan.py", line 250, in DPGAN_runner
torch.save(gan, save)
File "/usr/local/Caskroom/miniconda/base/envs/dp/lib/python3.8/site-packages/torch/serialization.py", line 372, in save
_save(obj, opened_zipfile, pickle_module, pickle_protocol)
File "/usr/local/Caskroom/miniconda/base/envs/dp/lib/python3.8/site-packages/torch/serialization.py", line 476, in _save
pickler.dump(obj)
TypeError: cannot pickle 'torch._C.Generator' object
Forgive the vagueness of the question, I am pretty new to PyTorch but cannot find anything about this error online. The Generator module I have written is as follows:
class Generator(nn.Module):
    """Residual MLP generator.

    Two residual blocks operating in the latent space, followed by a
    projection block to ``output_dim``. Each block is
    Linear (no bias) -> LayerNorm -> activation, where the activation is
    Tanh when ``binary`` is True and LeakyReLU(0.2) otherwise.
    """

    def __init__(self, latent_dim, output_dim, binary=True):
        super(Generator, self).__init__()
        # Pick the activation factory once; it is called per block and
        # never stored on the module itself.
        make_activation = nn.Tanh if binary else (lambda: nn.LeakyReLU(0.2))

        def block(in_features, out_features, activation_factory):
            # One building block: bias-free Linear -> LayerNorm -> activation.
            return nn.Sequential(
                nn.Linear(in_features, out_features, bias=False),
                nn.LayerNorm(out_features),
                activation_factory(),
            )

        # Attribute names kept as layer_0/1/2 so state_dict keys are stable.
        self.layer_0 = block(latent_dim, latent_dim, make_activation)
        self.layer_1 = block(latent_dim, latent_dim, make_activation)
        self.layer_2 = block(latent_dim, output_dim, make_activation)

    def forward(self, noise):
        # Residual connections around the two latent-space blocks,
        # then a plain projection to the output dimension.
        hidden = noise + self.layer_0(noise)
        hidden = hidden + self.layer_1(hidden)
        return self.layer_2(hidden)
Would appreciate any pointers, thanks!