I have a model that contains this module:
import torch.nn as nn

# BasicConv and TUMV2 are custom modules from my project
class FuckNet3(nn.Module):
    def __init__(self):
        super(FuckNet3, self).__init__()
        self.welcome_layer = BasicConv(3, 768, 3, 1, 1)
        self.welcome_layer2 = BasicConv(3, 128, 3, 1, 1)
        self.planes = 256
        self.smooth = True
        self.num_scales = 6
        self.num_levels = 8
        self._construct_modules()

    def _construct_modules(self):
        # construct the 8 TUM levels
        for i in range(self.num_levels):
            if i == 0:
                setattr(self,
                        'unet{}'.format(i + 1),
                        TUMV2(first_level=True,
                              input_planes=self.planes // 2,
                              is_smooth=self.smooth,
                              scales=self.num_scales,
                              side_channel=512))  # side channel isn't fixed.
            else:
                setattr(self,
                        'unet{}'.format(i + 1),
                        TUMV2(first_level=False,
                              input_planes=self.planes // 2,
                              is_smooth=self.smooth,
                              scales=self.num_scales,
                              side_channel=self.planes))
        # this self.leach causes my trace error
        self.leach = nn.ModuleList([BasicConv(
            256 + 512,
            128,
            kernel_size=(1, 1), stride=(1, 1))] * 8)

    def forward(self, x):
        base_feature = self.welcome_layer(x)
        tum_outs = [self.unet1(self.leach[0](base_feature), 'none')]
        for i in range(1, self.num_levels):
            tum_outs.append(
                getattr(self, 'unet{}'.format(i + 1))(
                    self.leach[i](base_feature), tum_outs[i - 1][-1]
                )
            )
        return tuple(tum_outs[0])
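For context, the error happens when I trace the model, roughly like this (the input size here is just an example, my real code is more involved):

import torch

model = FuckNet3()
# tracing the model is what triggers the error below
traced = torch.jit.trace(model, torch.randn(1, 3, 512, 512))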
I get this error:
File "/home/jintain/.local/lib/python3.6/site-packages/torch/jit/__init__.py", line 1469, in __init__
check_unique(param)
File "/home/jintain/.local/lib/python3.6/site-packages/torch/jit/__init__.py", line 1461, in check_unique
raise ValueError("TracedModules don't support parameter sharing between modules")
ValueError: TracedModules don't support parameter sharing between modules
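As far as I can tell, the check fires whenever the same parameter object is reachable from more than one submodule. Here is a minimal sketch that I believe reproduces the same ValueError under tracing on this PyTorch version (the shapes and names are made up just for illustration):

import torch
import torch.nn as nn

class Shared(nn.Module):
    def __init__(self):
        super(Shared, self).__init__()
        conv = nn.Conv2d(8, 8, kernel_size=1)
        # [conv] * 2 puts the SAME module object into the list twice,
        # so entries 0 and 1 point at one set of weights
        self.layers = nn.ModuleList([conv] * 2)

    def forward(self, x):
        for layer in self.layers:
            x = layer(x)
        return x

# tracing walks the submodules and finds the duplicated parameters
torch.jit.trace(Shared(), torch.randn(1, 8, 4, 4))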
Does anybody know how I can solve this problem?
I do not understand why this counts as weight sharing, since my modules are defined in a list and each one is only called once!
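Is the problem that [BasicConv(...)] * 8 builds a list of eight references to one BasicConv object, so all eight leach entries share the same weights? If so, I guess the fix would be to construct eight separate modules with a list comprehension, something like this (just a sketch, with the same arguments as my self.leach above):

self.leach = nn.ModuleList([
    BasicConv(256 + 512, 128, kernel_size=(1, 1), stride=(1, 1))
    for _ in range(8)  # eight independent BasicConv instances, no shared parameters
])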
I will send a bitcoin to anybody who can solve my problem!