class TNET(nn.Module):
    """Toy module holding a single learnable weight vector of length 7.

    The original paste elided the forward body (``##``) and returned the
    undefined name ``rslts``, which would raise NameError; here forward is
    given a minimal working body that actually uses the parameter.
    """

    def __init__(self):
        super().__init__()
        # requires_grad=True is the default for nn.Parameter, so it is omitted.
        self.weights = nn.Parameter(torch.rand(7))

    def forward(self, data):
        """Elementwise-scale *data* by the learned weights.

        NOTE(review): the real forward body was elided in the question —
        this placeholder only demonstrates that the parameter participates
        in the computation; replace with the actual model logic.
        """
        rslts = data * self.weights
        return rslts
model = TNET()
# DataParallel (and DistributedDataParallel) wrap the model: the original
# TNET instance is stored as the .module attribute of the wrapper, so any
# custom attribute (like .weights) must be reached through model.module.
# Accessing model.weights directly raises
#   AttributeError: 'DataParallel' object has no attribute 'weights'
model = torch.nn.DataParallel(model).cuda()
#model = torch.nn.parallel.DistributedDataParallel(model.cuda(), device_ids=[0,1])
for i, (imgs, lbls) in enumerate(train_loader):
    # Fix: go through .module to reach attributes of the wrapped TNET.
    a = model.module.weights[0]
File "/opt/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 518, in __getattr__ type(self).__name__, name))
AttributeError: 'DataParallel' object has no attribute 'weights'
If I only use model = TNET(), everything works well. But with DistributedDataParallel and DataParallel, the wrapped model has no attribute 'weights'.
How can I access self.weights from outside the model when it is wrapped with DistributedDataParallel?